1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
39
40 #ifdef TARGET_X86_64
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47 #if 1
48 #define BUGGY_64(x) NULL
49 #endif
50 #else
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
53 #define CODE64(s) 0
54 #define REX_X(s) 0
55 #define REX_B(s) 0
56 #endif
57
58 //#define MACRO_TEST 1
59
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0;
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;
64
65 #ifdef TARGET_X86_64
66 static int x86_64_hregs;
67 #endif
68
69 typedef struct DisasContext {
70 /* current insn context */
71 int override; /* -1 if no override */
72 int prefix;
73 int aflag, dflag;
74 target_ulong pc; /* pc = eip + cs_base */
75 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
76 static state change (stop translation) */
77 /* current block context */
78 target_ulong cs_base; /* base of CS segment */
79 int pe; /* protected mode */
80 int code32; /* 32 bit code segment */
81 #ifdef TARGET_X86_64
82 int lma; /* long mode active */
83 int code64; /* 64 bit code segment */
84 int rex_x, rex_b;
85 #endif
86 int ss32; /* 32 bit stack segment */
87 int cc_op; /* current CC operation */
88 int addseg; /* non zero if either DS/ES/SS have a non zero base */
89 int f_st; /* currently unused */
90 int vm86; /* vm86 mode */
91 int cpl;
92 int iopl;
93 int tf; /* TF cpu flag */
94 int singlestep_enabled; /* "hardware" single step enabled */
95 int jmp_opt; /* use direct block chaining for direct jumps */
96 int mem_index; /* select memory access functions */
97 uint64_t flags; /* all execution flags */
98 struct TranslationBlock *tb;
99 int popl_esp_hack; /* for correct popl with esp base handling */
100 int rip_offset; /* only used in x86_64, but left for simplicity */
101 int cpuid_features;
102 int cpuid_ext_features;
103 int cpuid_ext2_features;
104 } DisasContext;
105
106 static void gen_eob(DisasContext *s);
107 static void gen_jmp(DisasContext *s, target_ulong eip);
108 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109
110 /* i386 arith/logic operations */
111 enum {
112 OP_ADDL,
113 OP_ORL,
114 OP_ADCL,
115 OP_SBBL,
116 OP_ANDL,
117 OP_SUBL,
118 OP_XORL,
119 OP_CMPL,
120 };
121
122 /* i386 shift ops */
123 enum {
124 OP_ROL,
125 OP_ROR,
126 OP_RCL,
127 OP_RCR,
128 OP_SHL,
129 OP_SHR,
130 OP_SHL1, /* undocumented */
131 OP_SAR = 7,
132 };
133
134 /* operand size */
135 enum {
136 OT_BYTE = 0,
137 OT_WORD,
138 OT_LONG,
139 OT_QUAD,
140 };
141
142 enum {
143 /* I386 int registers */
144 OR_EAX, /* MUST be even numbered */
145 OR_ECX,
146 OR_EDX,
147 OR_EBX,
148 OR_ESP,
149 OR_EBP,
150 OR_ESI,
151 OR_EDI,
152
153 OR_TMP0 = 16, /* temporary operand register */
154 OR_TMP1,
155 OR_A0, /* temporary register used when doing address evaluation */
156 };
157
158 static inline void gen_op_movl_T0_0(void)
159 {
160 tcg_gen_movi_tl(cpu_T[0], 0);
161 }
162
163 static inline void gen_op_movl_T0_im(int32_t val)
164 {
165 tcg_gen_movi_tl(cpu_T[0], val);
166 }
167
168 static inline void gen_op_movl_T0_imu(uint32_t val)
169 {
170 tcg_gen_movi_tl(cpu_T[0], val);
171 }
172
173 static inline void gen_op_movl_T1_im(int32_t val)
174 {
175 tcg_gen_movi_tl(cpu_T[1], val);
176 }
177
178 static inline void gen_op_movl_T1_imu(uint32_t val)
179 {
180 tcg_gen_movi_tl(cpu_T[1], val);
181 }
182
183 static inline void gen_op_movl_A0_im(uint32_t val)
184 {
185 tcg_gen_movi_tl(cpu_A0, val);
186 }
187
188 #ifdef TARGET_X86_64
189 static inline void gen_op_movq_A0_im(int64_t val)
190 {
191 tcg_gen_movi_tl(cpu_A0, val);
192 }
193 #endif
194
195 static inline void gen_movtl_T0_im(target_ulong val)
196 {
197 tcg_gen_movi_tl(cpu_T[0], val);
198 }
199
200 static inline void gen_movtl_T1_im(target_ulong val)
201 {
202 tcg_gen_movi_tl(cpu_T[1], val);
203 }
204
205 static inline void gen_op_andl_T0_ffff(void)
206 {
207 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
208 }
209
210 static inline void gen_op_andl_T0_im(uint32_t val)
211 {
212 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
213 }
214
215 static inline void gen_op_movl_T0_T1(void)
216 {
217 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
218 }
219
220 static inline void gen_op_andl_A0_ffff(void)
221 {
222 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
223 }
224
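/* NB_OP_SIZES counts the supported operand sizes (byte/word/long, plus
   quad on x86-64).  DEF_REGS() expands a register-indexed list of
   generated-op names, e.g. DEF_REGS(gen_op_cmovw_, _T1_T0) yields
   gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0, ... */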
225 #ifdef TARGET_X86_64
226
227 #define NB_OP_SIZES 4
228
229 #define DEF_REGS(prefix, suffix) \
230 prefix ## EAX ## suffix,\
231 prefix ## ECX ## suffix,\
232 prefix ## EDX ## suffix,\
233 prefix ## EBX ## suffix,\
234 prefix ## ESP ## suffix,\
235 prefix ## EBP ## suffix,\
236 prefix ## ESI ## suffix,\
237 prefix ## EDI ## suffix,\
238 prefix ## R8 ## suffix,\
239 prefix ## R9 ## suffix,\
240 prefix ## R10 ## suffix,\
241 prefix ## R11 ## suffix,\
242 prefix ## R12 ## suffix,\
243 prefix ## R13 ## suffix,\
244 prefix ## R14 ## suffix,\
245 prefix ## R15 ## suffix,
246
247 #else /* !TARGET_X86_64 */
248
249 #define NB_OP_SIZES 3
250
251 #define DEF_REGS(prefix, suffix) \
252 prefix ## EAX ## suffix,\
253 prefix ## ECX ## suffix,\
254 prefix ## EDX ## suffix,\
255 prefix ## EBX ## suffix,\
256 prefix ## ESP ## suffix,\
257 prefix ## EBP ## suffix,\
258 prefix ## ESI ## suffix,\
259 prefix ## EDI ## suffix,
260
261 #endif /* !TARGET_X86_64 */
262
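/* Byte offsets of the 8/16/32 bit sub-registers (low byte, high byte,
   word, long and the upper 32 bit half) within a host target_ulong,
   for either host endianness. */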
263 #if defined(WORDS_BIGENDIAN)
264 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
265 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
266 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
267 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
268 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
269 #else
270 #define REG_B_OFFSET 0
271 #define REG_H_OFFSET 1
272 #define REG_W_OFFSET 0
273 #define REG_L_OFFSET 0
274 #define REG_LH_OFFSET 4
275 #endif
276
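/* Store T[t_index] into register 'reg' with operand size 'ot'.  Byte
   stores go to AH..BH for regs 4..7 unless REX-style register naming
   (x86_64_hregs) is active; in x86-64 builds a 32 bit store also clears
   the high half of the register. */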
277 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
278 {
279 switch(ot) {
280 case OT_BYTE:
281 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
282 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
283 } else {
284 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
285 }
286 break;
287 case OT_WORD:
288 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
289 break;
290 #ifdef TARGET_X86_64
291 case OT_LONG:
292 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
293 /* high part of register set to zero */
294 tcg_gen_movi_tl(cpu_tmp0, 0);
295 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
296 break;
297 default:
298 case OT_QUAD:
299 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
300 break;
301 #else
302 default:
303 case OT_LONG:
304 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
305 break;
306 #endif
307 }
308 }
309
310 static inline void gen_op_mov_reg_T0(int ot, int reg)
311 {
312 gen_op_mov_reg_TN(ot, 0, reg);
313 }
314
315 static inline void gen_op_mov_reg_T1(int ot, int reg)
316 {
317 gen_op_mov_reg_TN(ot, 1, reg);
318 }
319
320 static inline void gen_op_mov_reg_A0(int size, int reg)
321 {
322 switch(size) {
323 case 0:
324 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
325 break;
326 #ifdef TARGET_X86_64
327 case 1:
328 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
329 /* high part of register set to zero */
330 tcg_gen_movi_tl(cpu_tmp0, 0);
331 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
332 break;
333 default:
334 case 2:
335 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
336 break;
337 #else
338 default:
339 case 1:
340 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
341 break;
342 #endif
343 }
344 }
345
346 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
347 {
348 switch(ot) {
349 case OT_BYTE:
350 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
351 goto std_case;
352 } else {
353 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
354 }
355 break;
356 default:
357 std_case:
358 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
359 break;
360 }
361 }
362
363 static inline void gen_op_movl_A0_reg(int reg)
364 {
365 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
366 }
367
368 static inline void gen_op_addl_A0_im(int32_t val)
369 {
370 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
371 #ifdef TARGET_X86_64
372 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
373 #endif
374 }
375
376 #ifdef TARGET_X86_64
377 static inline void gen_op_addq_A0_im(int64_t val)
378 {
379 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
380 }
381 #endif
382
383 static void gen_add_A0_im(DisasContext *s, int val)
384 {
385 #ifdef TARGET_X86_64
386 if (CODE64(s))
387 gen_op_addq_A0_im(val);
388 else
389 #endif
390 gen_op_addl_A0_im(val);
391 }
392
393 static inline void gen_op_addl_T0_T1(void)
394 {
395 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
396 }
397
398 static inline void gen_op_jmp_T0(void)
399 {
400 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
401 }
402
403 static inline void gen_op_addw_ESP_im(int32_t val)
404 {
405 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
406 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
408 }
409
410 static inline void gen_op_addl_ESP_im(int32_t val)
411 {
412 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
413 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
414 #ifdef TARGET_X86_64
415 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
416 #endif
417 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
418 }
419
420 #ifdef TARGET_X86_64
421 static inline void gen_op_addq_ESP_im(int32_t val)
422 {
423 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
424 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
425 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426 }
427 #endif
428
429 static inline void gen_op_set_cc_op(int32_t val)
430 {
431 tcg_gen_movi_tl(cpu_tmp0, val);
432 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
433 }
434
435 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
436 {
437 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
438 if (shift != 0)
439 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
440 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
441 #ifdef TARGET_X86_64
442 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
443 #endif
444 }
445
446 static inline void gen_op_movl_A0_seg(int reg)
447 {
448 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
449 }
450
451 static inline void gen_op_addl_A0_seg(int reg)
452 {
453 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
454 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
455 #ifdef TARGET_X86_64
456 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
457 #endif
458 }
459
460 #ifdef TARGET_X86_64
461 static inline void gen_op_movq_A0_seg(int reg)
462 {
463 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
464 }
465
466 static inline void gen_op_addq_A0_seg(int reg)
467 {
468 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
469 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
470 }
471
472 static inline void gen_op_movq_A0_reg(int reg)
473 {
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
475 }
476
477 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
478 {
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
480 if (shift != 0)
481 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
482 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
483 }
484 #endif
485
486 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
487 [0] = {
488 DEF_REGS(gen_op_cmovw_, _T1_T0)
489 },
490 [1] = {
491 DEF_REGS(gen_op_cmovl_, _T1_T0)
492 },
493 #ifdef TARGET_X86_64
494 [2] = {
495 DEF_REGS(gen_op_cmovq_, _T1_T0)
496 },
497 #endif
498 };
499
500 #define DEF_ARITHC(SUFFIX)\
501 {\
502 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
503 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
504 },\
505 {\
506 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
507 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
508 },\
509 {\
510 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
511 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
512 },\
513 {\
514 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
515 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
516 },
517
518 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
519 DEF_ARITHC( )
520 };
521
522 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
523 DEF_ARITHC(_raw)
524 #ifndef CONFIG_USER_ONLY
525 DEF_ARITHC(_kernel)
526 DEF_ARITHC(_user)
527 #endif
528 };
529
530 static const int cc_op_arithb[8] = {
531 CC_OP_ADDB,
532 CC_OP_LOGICB,
533 CC_OP_ADDB,
534 CC_OP_SUBB,
535 CC_OP_LOGICB,
536 CC_OP_SUBB,
537 CC_OP_LOGICB,
538 CC_OP_SUBB,
539 };
540
541 #define DEF_CMPXCHG(SUFFIX)\
542 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
543 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
544 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
545 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
546
547 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
548 DEF_CMPXCHG( )
549 };
550
551 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
552 DEF_CMPXCHG(_raw)
553 #ifndef CONFIG_USER_ONLY
554 DEF_CMPXCHG(_kernel)
555 DEF_CMPXCHG(_user)
556 #endif
557 };
558
559 #define DEF_SHIFT(SUFFIX)\
560 {\
561 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
562 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
563 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
565 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
566 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
568 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
569 },\
570 {\
571 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
572 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
573 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
574 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
575 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
576 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
577 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
578 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
579 },\
580 {\
581 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
584 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
587 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
588 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
589 },\
590 {\
591 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
592 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
593 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
594 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
595 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
596 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
597 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
598 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
599 },
600
601 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
602 DEF_SHIFT( )
603 };
604
605 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
606 DEF_SHIFT(_raw)
607 #ifndef CONFIG_USER_ONLY
608 DEF_SHIFT(_kernel)
609 DEF_SHIFT(_user)
610 #endif
611 };
612
613 #define DEF_SHIFTD(SUFFIX, op)\
614 {\
615 NULL,\
616 NULL,\
617 },\
618 {\
619 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
621 },\
622 {\
623 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
625 },\
626 {\
627 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
628 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
629 },
630
631 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
632 DEF_SHIFTD(, im)
633 };
634
635 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
636 DEF_SHIFTD(, ECX)
637 };
638
639 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
640 DEF_SHIFTD(_raw, im)
641 #ifndef CONFIG_USER_ONLY
642 DEF_SHIFTD(_kernel, im)
643 DEF_SHIFTD(_user, im)
644 #endif
645 };
646
647 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
648 DEF_SHIFTD(_raw, ECX)
649 #ifndef CONFIG_USER_ONLY
650 DEF_SHIFTD(_kernel, ECX)
651 DEF_SHIFTD(_user, ECX)
652 #endif
653 };
654
655 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
656 [0] = {
657 gen_op_btw_T0_T1_cc,
658 gen_op_btsw_T0_T1_cc,
659 gen_op_btrw_T0_T1_cc,
660 gen_op_btcw_T0_T1_cc,
661 },
662 [1] = {
663 gen_op_btl_T0_T1_cc,
664 gen_op_btsl_T0_T1_cc,
665 gen_op_btrl_T0_T1_cc,
666 gen_op_btcl_T0_T1_cc,
667 },
668 #ifdef TARGET_X86_64
669 [2] = {
670 gen_op_btq_T0_T1_cc,
671 gen_op_btsq_T0_T1_cc,
672 gen_op_btrq_T0_T1_cc,
673 gen_op_btcq_T0_T1_cc,
674 },
675 #endif
676 };
677
678 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
679 gen_op_add_bitw_A0_T1,
680 gen_op_add_bitl_A0_T1,
681 X86_64_ONLY(gen_op_add_bitq_A0_T1),
682 };
683
684 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
685 [0] = {
686 gen_op_bsfw_T0_cc,
687 gen_op_bsrw_T0_cc,
688 },
689 [1] = {
690 gen_op_bsfl_T0_cc,
691 gen_op_bsrl_T0_cc,
692 },
693 #ifdef TARGET_X86_64
694 [2] = {
695 gen_op_bsfq_T0_cc,
696 gen_op_bsrq_T0_cc,
697 },
698 #endif
699 };
700
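/* For the memory access helpers below, 'idx' packs the operand size
   (OT_BYTE..OT_QUAD) into its low 2 bits and the memory access index,
   biased by one, into the upper bits -- hence the (idx >> 2) - 1. */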
701 static inline void gen_op_lds_T0_A0(int idx)
702 {
703 int mem_index = (idx >> 2) - 1;
704 switch(idx & 3) {
705 case 0:
706 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707 break;
708 case 1:
709 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710 break;
711 default:
712 case 2:
713 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714 break;
715 }
716 }
717
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static inline void gen_op_ld_T0_A0(int idx)
720 {
721 int mem_index = (idx >> 2) - 1;
722 switch(idx & 3) {
723 case 0:
724 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
725 break;
726 case 1:
727 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
728 break;
729 case 2:
730 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
731 break;
732 default:
733 case 3:
734 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
735 break;
736 }
737 }
738
739 static inline void gen_op_ldu_T0_A0(int idx)
740 {
741 gen_op_ld_T0_A0(idx);
742 }
743
744 static inline void gen_op_ld_T1_A0(int idx)
745 {
746 int mem_index = (idx >> 2) - 1;
747 switch(idx & 3) {
748 case 0:
749 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
750 break;
751 case 1:
752 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
753 break;
754 case 2:
755 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
756 break;
757 default:
758 case 3:
759 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
760 break;
761 }
762 }
763
764 static inline void gen_op_st_T0_A0(int idx)
765 {
766 int mem_index = (idx >> 2) - 1;
767 switch(idx & 3) {
768 case 0:
769 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
770 break;
771 case 1:
772 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
773 break;
774 case 2:
775 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
776 break;
777 default:
778 case 3:
779 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
780 break;
781 }
782 }
783
784 static inline void gen_op_st_T1_A0(int idx)
785 {
786 int mem_index = (idx >> 2) - 1;
787 switch(idx & 3) {
788 case 0:
789 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
790 break;
791 case 1:
792 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
793 break;
794 case 2:
795 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
796 break;
797 default:
798 case 3:
799 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
800 break;
801 }
802 }
803
804 static inline void gen_jmp_im(target_ulong pc)
805 {
806 tcg_gen_movi_tl(cpu_tmp0, pc);
807 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
808 }
809
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
811 {
812 int override;
813
814 override = s->override;
815 #ifdef TARGET_X86_64
816 if (s->aflag == 2) {
817 if (override >= 0) {
818 gen_op_movq_A0_seg(override);
819 gen_op_addq_A0_reg_sN(0, R_ESI);
820 } else {
821 gen_op_movq_A0_reg(R_ESI);
822 }
823 } else
824 #endif
825 if (s->aflag) {
826 /* 32 bit address */
827 if (s->addseg && override < 0)
828 override = R_DS;
829 if (override >= 0) {
830 gen_op_movl_A0_seg(override);
831 gen_op_addl_A0_reg_sN(0, R_ESI);
832 } else {
833 gen_op_movl_A0_reg(R_ESI);
834 }
835 } else {
836 /* 16 bit address, always override */
837 if (override < 0)
838 override = R_DS;
839 gen_op_movl_A0_reg(R_ESI);
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(override);
842 }
843 }
844
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
846 {
847 #ifdef TARGET_X86_64
848 if (s->aflag == 2) {
849 gen_op_movq_A0_reg(R_EDI);
850 } else
851 #endif
852 if (s->aflag) {
853 if (s->addseg) {
854 gen_op_movl_A0_seg(R_ES);
855 gen_op_addl_A0_reg_sN(0, R_EDI);
856 } else {
857 gen_op_movl_A0_reg(R_EDI);
858 }
859 } else {
860 gen_op_movl_A0_reg(R_EDI);
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(R_ES);
863 }
864 }
865
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
871 };
872
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
874 gen_op_jnz_ecxw,
875 gen_op_jnz_ecxl,
876 X86_64_ONLY(gen_op_jnz_ecxq),
877 };
878
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
880 gen_op_jz_ecxw,
881 gen_op_jz_ecxl,
882 X86_64_ONLY(gen_op_jz_ecxq),
883 };
884
885 static GenOpFunc *gen_op_dec_ECX[3] = {
886 gen_op_decw_ECX,
887 gen_op_decl_ECX,
888 X86_64_ONLY(gen_op_decq_ECX),
889 };
890
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
892 {
893 gen_op_jnz_subb,
894 gen_op_jnz_subw,
895 gen_op_jnz_subl,
896 X86_64_ONLY(gen_op_jnz_subq),
897 },
898 {
899 gen_op_jz_subb,
900 gen_op_jz_subw,
901 gen_op_jz_subl,
902 X86_64_ONLY(gen_op_jz_subq),
903 },
904 };
905
906 static GenOpFunc *gen_op_in_DX_T0[3] = {
907 gen_op_inb_DX_T0,
908 gen_op_inw_DX_T0,
909 gen_op_inl_DX_T0,
910 };
911
912 static GenOpFunc *gen_op_out_DX_T0[3] = {
913 gen_op_outb_DX_T0,
914 gen_op_outw_DX_T0,
915 gen_op_outl_DX_T0,
916 };
917
918 static GenOpFunc *gen_op_in[3] = {
919 gen_op_inb_T0_T1,
920 gen_op_inw_T0_T1,
921 gen_op_inl_T0_T1,
922 };
923
924 static GenOpFunc *gen_op_out[3] = {
925 gen_op_outb_T0_T1,
926 gen_op_outw_T0_T1,
927 gen_op_outl_T0_T1,
928 };
929
930 static GenOpFunc *gen_check_io_T0[3] = {
931 gen_op_check_iob_T0,
932 gen_op_check_iow_T0,
933 gen_op_check_iol_T0,
934 };
935
936 static GenOpFunc *gen_check_io_DX[3] = {
937 gen_op_check_iob_DX,
938 gen_op_check_iow_DX,
939 gen_op_check_iol_DX,
940 };
941
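/* In protected mode, IN/OUT/INS/OUTS must consult the TSS I/O permission
   bitmap whenever CPL > IOPL, and always in vm86 mode. */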
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943 {
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
947 gen_jmp_im(cur_eip);
948 if (use_dx)
949 gen_check_io_DX[ot]();
950 else
951 gen_check_io_T0[ot]();
952 }
953 }
954
955 static inline void gen_movs(DisasContext *s, int ot)
956 {
957 gen_string_movl_A0_ESI(s);
958 gen_op_ld_T0_A0(ot + s->mem_index);
959 gen_string_movl_A0_EDI(s);
960 gen_op_st_T0_A0(ot + s->mem_index);
961 gen_op_movl_T0_Dshift[ot]();
962 #ifdef TARGET_X86_64
963 if (s->aflag == 2) {
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
966 } else
967 #endif
968 if (s->aflag) {
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
971 } else {
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
974 }
975 }
976
977 static inline void gen_update_cc_op(DisasContext *s)
978 {
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
982 }
983 }
984
985 /* XXX: does not work with gdbstub "ice" single step - not a
986 serious problem */
987 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
988 {
989 int l1, l2;
990
991 l1 = gen_new_label();
992 l2 = gen_new_label();
993 gen_op_jnz_ecx[s->aflag](l1);
994 gen_set_label(l2);
995 gen_jmp_tb(s, next_eip, 1);
996 gen_set_label(l1);
997 return l2;
998 }
999
1000 static inline void gen_stos(DisasContext *s, int ot)
1001 {
1002 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1003 gen_string_movl_A0_EDI(s);
1004 gen_op_st_T0_A0(ot + s->mem_index);
1005 gen_op_movl_T0_Dshift[ot]();
1006 #ifdef TARGET_X86_64
1007 if (s->aflag == 2) {
1008 gen_op_addq_EDI_T0();
1009 } else
1010 #endif
1011 if (s->aflag) {
1012 gen_op_addl_EDI_T0();
1013 } else {
1014 gen_op_addw_EDI_T0();
1015 }
1016 }
1017
1018 static inline void gen_lods(DisasContext *s, int ot)
1019 {
1020 gen_string_movl_A0_ESI(s);
1021 gen_op_ld_T0_A0(ot + s->mem_index);
1022 gen_op_mov_reg_T0(ot, R_EAX);
1023 gen_op_movl_T0_Dshift[ot]();
1024 #ifdef TARGET_X86_64
1025 if (s->aflag == 2) {
1026 gen_op_addq_ESI_T0();
1027 } else
1028 #endif
1029 if (s->aflag) {
1030 gen_op_addl_ESI_T0();
1031 } else {
1032 gen_op_addw_ESI_T0();
1033 }
1034 }
1035
1036 static inline void gen_scas(DisasContext *s, int ot)
1037 {
1038 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1039 gen_string_movl_A0_EDI(s);
1040 gen_op_ld_T1_A0(ot + s->mem_index);
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift[ot]();
1043 #ifdef TARGET_X86_64
1044 if (s->aflag == 2) {
1045 gen_op_addq_EDI_T0();
1046 } else
1047 #endif
1048 if (s->aflag) {
1049 gen_op_addl_EDI_T0();
1050 } else {
1051 gen_op_addw_EDI_T0();
1052 }
1053 }
1054
1055 static inline void gen_cmps(DisasContext *s, int ot)
1056 {
1057 gen_string_movl_A0_ESI(s);
1058 gen_op_ld_T0_A0(ot + s->mem_index);
1059 gen_string_movl_A0_EDI(s);
1060 gen_op_ld_T1_A0(ot + s->mem_index);
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift[ot]();
1063 #ifdef TARGET_X86_64
1064 if (s->aflag == 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1067 } else
1068 #endif
1069 if (s->aflag) {
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1072 } else {
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
1075 }
1076 }
1077
1078 static inline void gen_ins(DisasContext *s, int ot)
1079 {
1080 gen_string_movl_A0_EDI(s);
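/* write a dummy value first so that any page fault is raised before
   the port is actually read (precise exception support) */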
1081 gen_op_movl_T0_0();
1082 gen_op_st_T0_A0(ot + s->mem_index);
1083 gen_op_in_DX_T0[ot]();
1084 gen_op_st_T0_A0(ot + s->mem_index);
1085 gen_op_movl_T0_Dshift[ot]();
1086 #ifdef TARGET_X86_64
1087 if (s->aflag == 2) {
1088 gen_op_addq_EDI_T0();
1089 } else
1090 #endif
1091 if (s->aflag) {
1092 gen_op_addl_EDI_T0();
1093 } else {
1094 gen_op_addw_EDI_T0();
1095 }
1096 }
1097
1098 static inline void gen_outs(DisasContext *s, int ot)
1099 {
1100 gen_string_movl_A0_ESI(s);
1101 gen_op_ld_T0_A0(ot + s->mem_index);
1102 gen_op_out_DX_T0[ot]();
1103 gen_op_movl_T0_Dshift[ot]();
1104 #ifdef TARGET_X86_64
1105 if (s->aflag == 2) {
1106 gen_op_addq_ESI_T0();
1107 } else
1108 #endif
1109 if (s->aflag) {
1110 gen_op_addl_ESI_T0();
1111 } else {
1112 gen_op_addw_ESI_T0();
1113 }
1114 }
1115
1116 /* same method as Valgrind: we generate jumps to the current or next
1117 instruction */
1118 #define GEN_REPZ(op) \
1119 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1120 target_ulong cur_eip, target_ulong next_eip) \
1121 { \
1122 int l2;\
1123 gen_update_cc_op(s); \
1124 l2 = gen_jz_ecx_string(s, next_eip); \
1125 gen_ ## op(s, ot); \
1126 gen_op_dec_ECX[s->aflag](); \
1127 /* a loop would cause two single step exceptions if ECX = 1 \
1128 before rep string_insn */ \
1129 if (!s->jmp_opt) \
1130 gen_op_jz_ecx[s->aflag](l2); \
1131 gen_jmp(s, cur_eip); \
1132 }
1133
1134 #define GEN_REPZ2(op) \
1135 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1136 target_ulong cur_eip, \
1137 target_ulong next_eip, \
1138 int nz) \
1139 { \
1140 int l2;\
1141 gen_update_cc_op(s); \
1142 l2 = gen_jz_ecx_string(s, next_eip); \
1143 gen_ ## op(s, ot); \
1144 gen_op_dec_ECX[s->aflag](); \
1145 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1146 gen_op_string_jnz_sub[nz][ot](l2);\
1147 if (!s->jmp_opt) \
1148 gen_op_jz_ecx[s->aflag](l2); \
1149 gen_jmp(s, cur_eip); \
1150 }
1151
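/* Instantiate the REP-prefixed string operations; scas and cmps
   additionally test ZF after each iteration (the 'nz' argument
   selects REPZ vs. REPNZ behaviour). */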
1152 GEN_REPZ(movs)
1153 GEN_REPZ(stos)
1154 GEN_REPZ(lods)
1155 GEN_REPZ(ins)
1156 GEN_REPZ(outs)
1157 GEN_REPZ2(scas)
1158 GEN_REPZ2(cmps)
1159
1160 enum {
1161 JCC_O,
1162 JCC_B,
1163 JCC_Z,
1164 JCC_BE,
1165 JCC_S,
1166 JCC_P,
1167 JCC_L,
1168 JCC_LE,
1169 };
1170
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172 [OT_BYTE] = {
1173 NULL,
1174 gen_op_jb_subb,
1175 gen_op_jz_subb,
1176 gen_op_jbe_subb,
1177 gen_op_js_subb,
1178 NULL,
1179 gen_op_jl_subb,
1180 gen_op_jle_subb,
1181 },
1182 [OT_WORD] = {
1183 NULL,
1184 gen_op_jb_subw,
1185 gen_op_jz_subw,
1186 gen_op_jbe_subw,
1187 gen_op_js_subw,
1188 NULL,
1189 gen_op_jl_subw,
1190 gen_op_jle_subw,
1191 },
1192 [OT_LONG] = {
1193 NULL,
1194 gen_op_jb_subl,
1195 gen_op_jz_subl,
1196 gen_op_jbe_subl,
1197 gen_op_js_subl,
1198 NULL,
1199 gen_op_jl_subl,
1200 gen_op_jle_subl,
1201 },
1202 #ifdef TARGET_X86_64
1203 [OT_QUAD] = {
1204 NULL,
1205 BUGGY_64(gen_op_jb_subq),
1206 gen_op_jz_subq,
1207 BUGGY_64(gen_op_jbe_subq),
1208 gen_op_js_subq,
1209 NULL,
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
1212 },
1213 #endif
1214 };
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1216 [0] = {
1217 gen_op_loopnzw,
1218 gen_op_loopzw,
1219 gen_op_jnz_ecxw,
1220 },
1221 [1] = {
1222 gen_op_loopnzl,
1223 gen_op_loopzl,
1224 gen_op_jnz_ecxl,
1225 },
1226 #ifdef TARGET_X86_64
1227 [2] = {
1228 gen_op_loopnzq,
1229 gen_op_loopzq,
1230 gen_op_jnz_ecxq,
1231 },
1232 #endif
1233 };
1234
1235 static GenOpFunc *gen_setcc_slow[8] = {
1236 gen_op_seto_T0_cc,
1237 gen_op_setb_T0_cc,
1238 gen_op_setz_T0_cc,
1239 gen_op_setbe_T0_cc,
1240 gen_op_sets_T0_cc,
1241 gen_op_setp_T0_cc,
1242 gen_op_setl_T0_cc,
1243 gen_op_setle_T0_cc,
1244 };
1245
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1253 NULL,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1263 NULL,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1273 NULL,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1276 },
1277 #ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1284 NULL,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
1287 },
1288 #endif
1289 };
1290
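/* x87 arithmetic helpers, indexed by the 3 bit fp op field of the opcode */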
1291 static void *helper_fp_arith_ST0_FT0[8] = {
1292 helper_fadd_ST0_FT0,
1293 helper_fmul_ST0_FT0,
1294 helper_fcom_ST0_FT0,
1295 helper_fcom_ST0_FT0,
1296 helper_fsub_ST0_FT0,
1297 helper_fsubr_ST0_FT0,
1298 helper_fdiv_ST0_FT0,
1299 helper_fdivr_ST0_FT0,
1300 };
1301
1302 /* NOTE the exception in "r" op ordering */
1303 static void *helper_fp_arith_STN_ST0[8] = {
1304 helper_fadd_STN_ST0,
1305 helper_fmul_STN_ST0,
1306 NULL,
1307 NULL,
1308 helper_fsubr_STN_ST0,
1309 helper_fsub_STN_ST0,
1310 helper_fdivr_STN_ST0,
1311 helper_fdiv_STN_ST0,
1312 };
1313
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
1315 static void gen_op(DisasContext *s1, int op, int ot, int d)
1316 {
1317 GenOpFunc *gen_update_cc;
1318
1319 if (d != OR_TMP0) {
1320 gen_op_mov_TN_reg(ot, 0, d);
1321 } else {
1322 gen_op_ld_T0_A0(ot + s1->mem_index);
1323 }
1324 switch(op) {
1325 case OP_ADCL:
1326 case OP_SBBL:
1327 if (s1->cc_op != CC_OP_DYNAMIC)
1328 gen_op_set_cc_op(s1->cc_op);
1329 if (d != OR_TMP0) {
1330 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331 gen_op_mov_reg_T0(ot, d);
1332 } else {
1333 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334 }
1335 s1->cc_op = CC_OP_DYNAMIC;
1336 goto the_end;
1337 case OP_ADDL:
1338 gen_op_addl_T0_T1();
1339 s1->cc_op = CC_OP_ADDB + ot;
1340 gen_update_cc = gen_op_update2_cc;
1341 break;
1342 case OP_SUBL:
1343 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1344 s1->cc_op = CC_OP_SUBB + ot;
1345 gen_update_cc = gen_op_update2_cc;
1346 break;
1347 default:
1348 case OP_ANDL:
1349 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1350 s1->cc_op = CC_OP_LOGICB + ot;
1351 gen_update_cc = gen_op_update1_cc;
1352 break;
1353 case OP_ORL:
1354 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355 s1->cc_op = CC_OP_LOGICB + ot;
1356 gen_update_cc = gen_op_update1_cc;
1357 break;
1358 case OP_XORL:
1359 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1360 s1->cc_op = CC_OP_LOGICB + ot;
1361 gen_update_cc = gen_op_update1_cc;
1362 break;
1363 case OP_CMPL:
1364 gen_op_cmpl_T0_T1_cc();
1365 s1->cc_op = CC_OP_SUBB + ot;
1366 gen_update_cc = NULL;
1367 break;
1368 }
1369 if (op != OP_CMPL) {
1370 if (d != OR_TMP0)
1371 gen_op_mov_reg_T0(ot, d);
1372 else
1373 gen_op_st_T0_A0(ot + s1->mem_index);
1374 }
1375 /* the flags update must happen after the memory write (precise
1376 exception support) */
1377 if (gen_update_cc)
1378 gen_update_cc();
1379 the_end: ;
1380 }
1381
1382 /* if d == OR_TMP0, it means memory operand (address in A0) */
1383 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1384 {
1385 if (d != OR_TMP0)
1386 gen_op_mov_TN_reg(ot, 0, d);
1387 else
1388 gen_op_ld_T0_A0(ot + s1->mem_index);
1389 if (s1->cc_op != CC_OP_DYNAMIC)
1390 gen_op_set_cc_op(s1->cc_op);
1391 if (c > 0) {
1392 gen_op_incl_T0();
1393 s1->cc_op = CC_OP_INCB + ot;
1394 } else {
1395 gen_op_decl_T0();
1396 s1->cc_op = CC_OP_DECB + ot;
1397 }
1398 if (d != OR_TMP0)
1399 gen_op_mov_reg_T0(ot, d);
1400 else
1401 gen_op_st_T0_A0(ot + s1->mem_index);
1402 gen_op_update_inc_cc();
1403 }
1404
1405 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1406 {
1407 if (d != OR_TMP0)
1408 gen_op_mov_TN_reg(ot, 0, d);
1409 else
1410 gen_op_ld_T0_A0(ot + s1->mem_index);
1411 if (s != OR_TMP1)
1412 gen_op_mov_TN_reg(ot, 1, s);
1413 /* for zero counts, flags are not updated, so must do it dynamically */
1414 if (s1->cc_op != CC_OP_DYNAMIC)
1415 gen_op_set_cc_op(s1->cc_op);
1416
1417 if (d != OR_TMP0)
1418 gen_op_shift_T0_T1_cc[ot][op]();
1419 else
1420 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1421 if (d != OR_TMP0)
1422 gen_op_mov_reg_T0(ot, d);
1423 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1424 }
1425
1426 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1427 {
1428 /* currently not optimized */
1429 gen_op_movl_T1_im(c);
1430 gen_shift(s1, op, ot, d, OR_TMP1);
1431 }
1432
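/* Decode the ModRM/SIB memory operand (base, index, scale, displacement
   and segment override) and leave the resulting address in A0. */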
1433 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1434 {
1435 target_long disp;
1436 int havesib;
1437 int base;
1438 int index;
1439 int scale;
1440 int opreg;
1441 int mod, rm, code, override, must_add_seg;
1442
1443 override = s->override;
1444 must_add_seg = s->addseg;
1445 if (override >= 0)
1446 must_add_seg = 1;
1447 mod = (modrm >> 6) & 3;
1448 rm = modrm & 7;
1449
1450 if (s->aflag) {
1451
1452 havesib = 0;
1453 base = rm;
1454 index = 0;
1455 scale = 0;
1456
1457 if (base == 4) {
1458 havesib = 1;
1459 code = ldub_code(s->pc++);
1460 scale = (code >> 6) & 3;
1461 index = ((code >> 3) & 7) | REX_X(s);
1462 base = (code & 7);
1463 }
1464 base |= REX_B(s);
1465
1466 switch (mod) {
1467 case 0:
1468 if ((base & 7) == 5) {
1469 base = -1;
1470 disp = (int32_t)ldl_code(s->pc);
1471 s->pc += 4;
1472 if (CODE64(s) && !havesib) {
1473 disp += s->pc + s->rip_offset;
1474 }
1475 } else {
1476 disp = 0;
1477 }
1478 break;
1479 case 1:
1480 disp = (int8_t)ldub_code(s->pc++);
1481 break;
1482 default:
1483 case 2:
1484 disp = ldl_code(s->pc);
1485 s->pc += 4;
1486 break;
1487 }
1488
1489 if (base >= 0) {
1490 /* for correct popl handling with esp */
1491 if (base == 4 && s->popl_esp_hack)
1492 disp += s->popl_esp_hack;
1493 #ifdef TARGET_X86_64
1494 if (s->aflag == 2) {
1495 gen_op_movq_A0_reg(base);
1496 if (disp != 0) {
1497 gen_op_addq_A0_im(disp);
1498 }
1499 } else
1500 #endif
1501 {
1502 gen_op_movl_A0_reg(base);
1503 if (disp != 0)
1504 gen_op_addl_A0_im(disp);
1505 }
1506 } else {
1507 #ifdef TARGET_X86_64
1508 if (s->aflag == 2) {
1509 gen_op_movq_A0_im(disp);
1510 } else
1511 #endif
1512 {
1513 gen_op_movl_A0_im(disp);
1514 }
1515 }
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN(scale, index);
1521 } else
1522 #endif
1523 {
1524 gen_op_addl_A0_reg_sN(scale, index);
1525 }
1526 }
1527 if (must_add_seg) {
1528 if (override < 0) {
1529 if (base == R_EBP || base == R_ESP)
1530 override = R_SS;
1531 else
1532 override = R_DS;
1533 }
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(override);
1537 } else
1538 #endif
1539 {
1540 gen_op_addl_A0_seg(override);
1541 }
1542 }
1543 } else {
1544 switch (mod) {
1545 case 0:
1546 if (rm == 6) {
1547 disp = lduw_code(s->pc);
1548 s->pc += 2;
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1551 goto no_rm;
1552 } else {
1553 disp = 0;
1554 }
1555 break;
1556 case 1:
1557 disp = (int8_t)ldub_code(s->pc++);
1558 break;
1559 default:
1560 case 2:
1561 disp = lduw_code(s->pc);
1562 s->pc += 2;
1563 break;
1564 }
1565 switch(rm) {
1566 case 0:
1567 gen_op_movl_A0_reg(R_EBX);
1568 gen_op_addl_A0_reg_sN(0, R_ESI);
1569 break;
1570 case 1:
1571 gen_op_movl_A0_reg(R_EBX);
1572 gen_op_addl_A0_reg_sN(0, R_EDI);
1573 break;
1574 case 2:
1575 gen_op_movl_A0_reg(R_EBP);
1576 gen_op_addl_A0_reg_sN(0, R_ESI);
1577 break;
1578 case 3:
1579 gen_op_movl_A0_reg(R_EBP);
1580 gen_op_addl_A0_reg_sN(0, R_EDI);
1581 break;
1582 case 4:
1583 gen_op_movl_A0_reg(R_ESI);
1584 break;
1585 case 5:
1586 gen_op_movl_A0_reg(R_EDI);
1587 break;
1588 case 6:
1589 gen_op_movl_A0_reg(R_EBP);
1590 break;
1591 default:
1592 case 7:
1593 gen_op_movl_A0_reg(R_EBX);
1594 break;
1595 }
1596 if (disp != 0)
1597 gen_op_addl_A0_im(disp);
1598 gen_op_andl_A0_ffff();
1599 no_rm:
1600 if (must_add_seg) {
1601 if (override < 0) {
1602 if (rm == 2 || rm == 3 || rm == 6)
1603 override = R_SS;
1604 else
1605 override = R_DS;
1606 }
1607 gen_op_addl_A0_seg(override);
1608 }
1609 }
1610
1611 opreg = OR_A0;
1612 disp = 0;
1613 *reg_ptr = opreg;
1614 *offset_ptr = disp;
1615 }
1616
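/* consume the ModRM memory-operand bytes without generating any code
   (used for hint/NOP-style opcodes) */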
1617 static void gen_nop_modrm(DisasContext *s, int modrm)
1618 {
1619 int mod, rm, base, code;
1620
1621 mod = (modrm >> 6) & 3;
1622 if (mod == 3)
1623 return;
1624 rm = modrm & 7;
1625
1626 if (s->aflag) {
1627
1628 base = rm;
1629
1630 if (base == 4) {
1631 code = ldub_code(s->pc++);
1632 base = (code & 7);
1633 }
1634
1635 switch (mod) {
1636 case 0:
1637 if (base == 5) {
1638 s->pc += 4;
1639 }
1640 break;
1641 case 1:
1642 s->pc++;
1643 break;
1644 default:
1645 case 2:
1646 s->pc += 4;
1647 break;
1648 }
1649 } else {
1650 switch (mod) {
1651 case 0:
1652 if (rm == 6) {
1653 s->pc += 2;
1654 }
1655 break;
1656 case 1:
1657 s->pc++;
1658 break;
1659 default:
1660 case 2:
1661 s->pc += 2;
1662 break;
1663 }
1664 }
1665 }
1666
1667 /* used for LEA and MOV AX, mem */
1668 static void gen_add_A0_ds_seg(DisasContext *s)
1669 {
1670 int override, must_add_seg;
1671 must_add_seg = s->addseg;
1672 override = R_DS;
1673 if (s->override >= 0) {
1674 override = s->override;
1675 must_add_seg = 1;
1676 } else {
1677 override = R_DS;
1678 }
1679 if (must_add_seg) {
1680 #ifdef TARGET_X86_64
1681 if (CODE64(s)) {
1682 gen_op_addq_A0_seg(override);
1683 } else
1684 #endif
1685 {
1686 gen_op_addl_A0_seg(override);
1687 }
1688 }
1689 }
1690
1691 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
1692 OR_TMP0 */
1693 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1694 {
1695 int mod, rm, opreg, disp;
1696
1697 mod = (modrm >> 6) & 3;
1698 rm = (modrm & 7) | REX_B(s);
1699 if (mod == 3) {
1700 if (is_store) {
1701 if (reg != OR_TMP0)
1702 gen_op_mov_TN_reg(ot, 0, reg);
1703 gen_op_mov_reg_T0(ot, rm);
1704 } else {
1705 gen_op_mov_TN_reg(ot, 0, rm);
1706 if (reg != OR_TMP0)
1707 gen_op_mov_reg_T0(ot, reg);
1708 }
1709 } else {
1710 gen_lea_modrm(s, modrm, &opreg, &disp);
1711 if (is_store) {
1712 if (reg != OR_TMP0)
1713 gen_op_mov_TN_reg(ot, 0, reg);
1714 gen_op_st_T0_A0(ot + s->mem_index);
1715 } else {
1716 gen_op_ld_T0_A0(ot + s->mem_index);
1717 if (reg != OR_TMP0)
1718 gen_op_mov_reg_T0(ot, reg);
1719 }
1720 }
1721 }
1722
1723 static inline uint32_t insn_get(DisasContext *s, int ot)
1724 {
1725 uint32_t ret;
1726
1727 switch(ot) {
1728 case OT_BYTE:
1729 ret = ldub_code(s->pc);
1730 s->pc++;
1731 break;
1732 case OT_WORD:
1733 ret = lduw_code(s->pc);
1734 s->pc += 2;
1735 break;
1736 default:
1737 case OT_LONG:
1738 ret = ldl_code(s->pc);
1739 s->pc += 4;
1740 break;
1741 }
1742 return ret;
1743 }
1744
1745 static inline int insn_const_size(unsigned int ot)
1746 {
1747 if (ot <= OT_LONG)
1748 return 1 << ot;
1749 else
1750 return 4;
1751 }
1752
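/* Chain to direct-jump slot 'tb_num' (0 or 1) of the current TB when the
   target eip stays on the same page(s) as the TB; otherwise end the block. */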
1753 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1754 {
1755 TranslationBlock *tb;
1756 target_ulong pc;
1757
1758 pc = s->cs_base + eip;
1759 tb = s->tb;
1760 /* NOTE: we handle the case where the TB spans two pages here */
1761 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1762 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1763 /* jump to same page: we can use a direct jump */
1764 tcg_gen_goto_tb(tb_num);
1765 gen_jmp_im(eip);
1766 tcg_gen_exit_tb((long)tb + tb_num);
1767 } else {
1768 /* jump to another page: currently not optimized */
1769 gen_jmp_im(eip);
1770 gen_eob(s);
1771 }
1772 }
1773
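/* Generate a conditional jump to 'val' (condition true) or 'next_eip',
   reusing the lazily computed flags of a preceding cmp/sub when possible. */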
1774 static inline void gen_jcc(DisasContext *s, int b,
1775 target_ulong val, target_ulong next_eip)
1776 {
1777 TranslationBlock *tb;
1778 int inv, jcc_op;
1779 GenOpFunc1 *func;
1780 target_ulong tmp;
1781 int l1, l2;
1782
1783 inv = b & 1;
1784 jcc_op = (b >> 1) & 7;
1785
1786 if (s->jmp_opt) {
1787 switch(s->cc_op) {
1788 /* we optimize the cmp/jcc case */
1789 case CC_OP_SUBB:
1790 case CC_OP_SUBW:
1791 case CC_OP_SUBL:
1792 case CC_OP_SUBQ:
1793 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1794 break;
1795
1796 /* some jumps are easy to compute */
1797 case CC_OP_ADDB:
1798 case CC_OP_ADDW:
1799 case CC_OP_ADDL:
1800 case CC_OP_ADDQ:
1801
1802 case CC_OP_ADCB:
1803 case CC_OP_ADCW:
1804 case CC_OP_ADCL:
1805 case CC_OP_ADCQ:
1806
1807 case CC_OP_SBBB:
1808 case CC_OP_SBBW:
1809 case CC_OP_SBBL:
1810 case CC_OP_SBBQ:
1811
1812 case CC_OP_LOGICB:
1813 case CC_OP_LOGICW:
1814 case CC_OP_LOGICL:
1815 case CC_OP_LOGICQ:
1816
1817 case CC_OP_INCB:
1818 case CC_OP_INCW:
1819 case CC_OP_INCL:
1820 case CC_OP_INCQ:
1821
1822 case CC_OP_DECB:
1823 case CC_OP_DECW:
1824 case CC_OP_DECL:
1825 case CC_OP_DECQ:
1826
1827 case CC_OP_SHLB:
1828 case CC_OP_SHLW:
1829 case CC_OP_SHLL:
1830 case CC_OP_SHLQ:
1831
1832 case CC_OP_SARB:
1833 case CC_OP_SARW:
1834 case CC_OP_SARL:
1835 case CC_OP_SARQ:
1836 switch(jcc_op) {
1837 case JCC_Z:
1838 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1839 break;
1840 case JCC_S:
1841 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1842 break;
1843 default:
1844 func = NULL;
1845 break;
1846 }
1847 break;
1848 default:
1849 func = NULL;
1850 break;
1851 }
1852
1853 if (s->cc_op != CC_OP_DYNAMIC) {
1854 gen_op_set_cc_op(s->cc_op);
1855 s->cc_op = CC_OP_DYNAMIC;
1856 }
1857
1858 if (!func) {
1859 gen_setcc_slow[jcc_op]();
1860 func = gen_op_jnz_T0_label;
1861 }
1862
1863 if (inv) {
1864 tmp = val;
1865 val = next_eip;
1866 next_eip = tmp;
1867 }
1868 tb = s->tb;
1869
1870 l1 = gen_new_label();
1871 func(l1);
1872
1873 gen_goto_tb(s, 0, next_eip);
1874
1875 gen_set_label(l1);
1876 gen_goto_tb(s, 1, val);
1877
1878 s->is_jmp = 3;
1879 } else {
1880
1881 if (s->cc_op != CC_OP_DYNAMIC) {
1882 gen_op_set_cc_op(s->cc_op);
1883 s->cc_op = CC_OP_DYNAMIC;
1884 }
1885 gen_setcc_slow[jcc_op]();
1886 if (inv) {
1887 tmp = val;
1888 val = next_eip;
1889 next_eip = tmp;
1890 }
1891 l1 = gen_new_label();
1892 l2 = gen_new_label();
1893 gen_op_jnz_T0_label(l1);
1894 gen_jmp_im(next_eip);
1895 gen_op_jmp_label(l2);
1896 gen_set_label(l1);
1897 gen_jmp_im(val);
1898 gen_set_label(l2);
1899 gen_eob(s);
1900 }
1901 }
1902
1903 static void gen_setcc(DisasContext *s, int b)
1904 {
1905 int inv, jcc_op;
1906 GenOpFunc *func;
1907
1908 inv = b & 1;
1909 jcc_op = (b >> 1) & 7;
1910 switch(s->cc_op) {
1911 /* we optimize the cmp/jcc case */
1912 case CC_OP_SUBB:
1913 case CC_OP_SUBW:
1914 case CC_OP_SUBL:
1915 case CC_OP_SUBQ:
1916 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1917 if (!func)
1918 goto slow_jcc;
1919 break;
1920
1921 /* some jumps are easy to compute */
1922 case CC_OP_ADDB:
1923 case CC_OP_ADDW:
1924 case CC_OP_ADDL:
1925 case CC_OP_ADDQ:
1926
1927 case CC_OP_LOGICB:
1928 case CC_OP_LOGICW:
1929 case CC_OP_LOGICL:
1930 case CC_OP_LOGICQ:
1931
1932 case CC_OP_INCB:
1933 case CC_OP_INCW:
1934 case CC_OP_INCL:
1935 case CC_OP_INCQ:
1936
1937 case CC_OP_DECB:
1938 case CC_OP_DECW:
1939 case CC_OP_DECL:
1940 case CC_OP_DECQ:
1941
1942 case CC_OP_SHLB:
1943 case CC_OP_SHLW:
1944 case CC_OP_SHLL:
1945 case CC_OP_SHLQ:
1946 switch(jcc_op) {
1947 case JCC_Z:
1948 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1949 break;
1950 case JCC_S:
1951 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1952 break;
1953 default:
1954 goto slow_jcc;
1955 }
1956 break;
1957 default:
1958 slow_jcc:
1959 if (s->cc_op != CC_OP_DYNAMIC)
1960 gen_op_set_cc_op(s->cc_op);
1961 func = gen_setcc_slow[jcc_op];
1962 break;
1963 }
1964 func();
1965 if (inv) {
1966 gen_op_xor_T0_1();
1967 }
1968 }
1969
1970 /* move T0 to seg_reg and compute if the CPU state may change. Never
1971 call this function with seg_reg == R_CS */
1972 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1973 {
1974 if (s->pe && !s->vm86) {
1975 /* XXX: optimize by finding processor state dynamically */
1976 if (s->cc_op != CC_OP_DYNAMIC)
1977 gen_op_set_cc_op(s->cc_op);
1978 gen_jmp_im(cur_eip);
1979 gen_op_movl_seg_T0(seg_reg);
1980 /* abort translation because the addseg value may change or
1981 because ss32 may change. For R_SS, translation must always
1982 stop as a special handling must be done to disable hardware
1983 interrupts for the next instruction */
1984 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1985 s->is_jmp = 3;
1986 } else {
1987 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1988 if (seg_reg == R_SS)
1989 s->is_jmp = 3;
1990 }
1991 }
1992
1993 #define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1994
1995 static inline int
1996 gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1997 {
1998 #if !defined(CONFIG_USER_ONLY)
1999 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
2000 if (s->cc_op != CC_OP_DYNAMIC)
2001 gen_op_set_cc_op(s->cc_op);
2002 SVM_movq_T1_im(s->pc - s->cs_base);
2003 gen_jmp_im(pc_start - s->cs_base);
2004 gen_op_geneflags();
2005 gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
2006 s->cc_op = CC_OP_DYNAMIC;
2007 /* FIXME: maybe we could move the io intercept vector to the TB as well
2008 so we know if this is an EOB or not ... let's assume it's not
2009 for now. */
2010 }
2011 #endif
2012 return 0;
2013 }
2014
2015 static inline int svm_is_rep(int prefixes)
2016 {
2017 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2018 }
2019
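/* Emit an SVM intercept check for 'type'/'param'; returns 1 only when the
   intercept unconditionally triggers a #VMEXIT and thus ends the TB. */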
2020 static inline int
2021 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2022 uint64_t type, uint64_t param)
2023 {
2024 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2025 /* no SVM activated */
2026 return 0;
2027 switch(type) {
2028 /* CRx and DRx reads/writes */
2029 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2030 if (s->cc_op != CC_OP_DYNAMIC) {
2031 gen_op_set_cc_op(s->cc_op);
2032 s->cc_op = CC_OP_DYNAMIC;
2033 }
2034 gen_jmp_im(pc_start - s->cs_base);
2035 SVM_movq_T1_im(param);
2036 gen_op_geneflags();
2037 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2038 /* this is a special case as we do not know if the interception occurs
2039 so we assume there was none */
2040 return 0;
2041 case SVM_EXIT_MSR:
2042 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2043 if (s->cc_op != CC_OP_DYNAMIC) {
2044 gen_op_set_cc_op(s->cc_op);
2045 s->cc_op = CC_OP_DYNAMIC;
2046 }
2047 gen_jmp_im(pc_start - s->cs_base);
2048 SVM_movq_T1_im(param);
2049 gen_op_geneflags();
2050 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2051 /* this is a special case as we do not know if the interception occurs
2052 so we assume there was none */
2053 return 0;
2054 }
2055 break;
2056 default:
2057 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2058 if (s->cc_op != CC_OP_DYNAMIC) {
2059 gen_op_set_cc_op(s->cc_op);
2060 s->cc_op = CC_OP_EFLAGS;
2061 }
2062 gen_jmp_im(pc_start - s->cs_base);
2063 SVM_movq_T1_im(param);
2064 gen_op_geneflags();
2065 gen_op_svm_vmexit(type >> 32, type);
2066 /* we can optimize this one so TBs don't get longer
2067 than up to vmexit */
2068 gen_eob(s);
2069 return 1;
2070 }
2071 }
2072 return 0;
2073 }
2074
2075 static inline int
2076 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2077 {
2078 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2079 }
2080
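/* Adjust ESP/RSP by 'addend', using the width implied by the code and
   stack segment sizes. */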
2081 static inline void gen_stack_update(DisasContext *s, int addend)
2082 {
2083 #ifdef TARGET_X86_64
2084 if (CODE64(s)) {
2085 gen_op_addq_ESP_im(addend);
2086 } else
2087 #endif
2088 if (s->ss32) {
2089 gen_op_addl_ESP_im(addend);
2090 } else {
2091 gen_op_addw_ESP_im(addend);
2092 }
2093 }
2094
2095 /* generate a push. It depends on ss32, addseg and dflag */
2096 static void gen_push_T0(DisasContext *s)
2097 {
2098 #ifdef TARGET_X86_64
2099 if (CODE64(s)) {
2100 gen_op_movq_A0_reg(R_ESP);
2101 if (s->dflag) {
2102 gen_op_addq_A0_im(-8);
2103 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2104 } else {
2105 gen_op_addq_A0_im(-2);
2106 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2107 }
2108 gen_op_mov_reg_A0(2, R_ESP);
2109 } else
2110 #endif
2111 {
2112 gen_op_movl_A0_reg(R_ESP);
2113 if (!s->dflag)
2114 gen_op_addl_A0_im(-2);
2115 else
2116 gen_op_addl_A0_im(-4);
2117 if (s->ss32) {
2118 if (s->addseg) {
2119 gen_op_movl_T1_A0();
2120 gen_op_addl_A0_seg(R_SS);
2121 }
2122 } else {
2123 gen_op_andl_A0_ffff();
2124 gen_op_movl_T1_A0();
2125 gen_op_addl_A0_seg(R_SS);
2126 }
2127 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2128 if (s->ss32 && !s->addseg)
2129 gen_op_mov_reg_A0(1, R_ESP);
2130 else
2131 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2132 }
2133 }
2134
2135 /* generate a push. It depends on ss32, addseg and dflag */
2136 /* slower version for T1, only used for call Ev */
2137 static void gen_push_T1(DisasContext *s)
2138 {
2139 #ifdef TARGET_X86_64
2140 if (CODE64(s)) {
2141 gen_op_movq_A0_reg(R_ESP);
2142 if (s->dflag) {
2143 gen_op_addq_A0_im(-8);
2144 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145 } else {
2146 gen_op_addq_A0_im(-2);
2147 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2148 }
2149 gen_op_mov_reg_A0(2, R_ESP);
2150 } else
2151 #endif
2152 {
2153 gen_op_movl_A0_reg(R_ESP);
2154 if (!s->dflag)
2155 gen_op_addl_A0_im(-2);
2156 else
2157 gen_op_addl_A0_im(-4);
2158 if (s->ss32) {
2159 if (s->addseg) {
2160 gen_op_addl_A0_seg(R_SS);
2161 }
2162 } else {
2163 gen_op_andl_A0_ffff();
2164 gen_op_addl_A0_seg(R_SS);
2165 }
2166 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167
2168 if (s->ss32 && !s->addseg)
2169 gen_op_mov_reg_A0(1, R_ESP);
2170 else
2171 gen_stack_update(s, (-2) << s->dflag);
2172 }
2173 }
2174
2175 /* two step pop is necessary for precise exceptions */
2176 static void gen_pop_T0(DisasContext *s)
2177 {
2178 #ifdef TARGET_X86_64
2179 if (CODE64(s)) {
2180 gen_op_movq_A0_reg(R_ESP);
2181 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2182 } else
2183 #endif
2184 {
2185 gen_op_movl_A0_reg(R_ESP);
2186 if (s->ss32) {
2187 if (s->addseg)
2188 gen_op_addl_A0_seg(R_SS);
2189 } else {
2190 gen_op_andl_A0_ffff();
2191 gen_op_addl_A0_seg(R_SS);
2192 }
2193 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2194 }
2195 }
2196
2197 static void gen_pop_update(DisasContext *s)
2198 {
2199 #ifdef TARGET_X86_64
2200 if (CODE64(s) && s->dflag) {
2201 gen_stack_update(s, 8);
2202 } else
2203 #endif
2204 {
2205 gen_stack_update(s, 2 << s->dflag);
2206 }
2207 }
2208
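/* compute the SS-relative stack address into A0, keeping the
   unsegmented ESP offset in T1 for the later register writeback */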
2209 static void gen_stack_A0(DisasContext *s)
2210 {
2211 gen_op_movl_A0_reg(R_ESP);
2212 if (!s->ss32)
2213 gen_op_andl_A0_ffff();
2214 gen_op_movl_T1_A0();
2215 if (s->addseg)
2216 gen_op_addl_A0_seg(R_SS);
2217 }
2218
2219 /* NOTE: wrap around in 16 bit not fully handled */
2220 static void gen_pusha(DisasContext *s)
2221 {
2222 int i;
2223 gen_op_movl_A0_reg(R_ESP);
2224 gen_op_addl_A0_im(-16 << s->dflag);
2225 if (!s->ss32)
2226 gen_op_andl_A0_ffff();
2227 gen_op_movl_T1_A0();
2228 if (s->addseg)
2229 gen_op_addl_A0_seg(R_SS);
2230 for(i = 0;i < 8; i++) {
2231 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2232 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2233 gen_op_addl_A0_im(2 << s->dflag);
2234 }
2235 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2236 }
2237
2238 /* NOTE: wrap around in 16 bit not fully handled */
2239 static void gen_popa(DisasContext *s)
2240 {
2241 int i;
2242 gen_op_movl_A0_reg(R_ESP);
2243 if (!s->ss32)
2244 gen_op_andl_A0_ffff();
2245 gen_op_movl_T1_A0();
2246 gen_op_addl_T1_im(16 << s->dflag);
2247 if (s->addseg)
2248 gen_op_addl_A0_seg(R_SS);
2249 for(i = 0;i < 8; i++) {
2250 /* ESP is not reloaded */
2251 if (i != 3) {
2252 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2253 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2254 }
2255 gen_op_addl_A0_im(2 << s->dflag);
2256 }
2257 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2258 }
2259
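/* ENTER: build a stack frame of 'esp_addend' bytes, copying 'level'
   (0..31) saved frame pointers for nested procedures. */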
2260 static void gen_enter(DisasContext *s, int esp_addend, int level)
2261 {
2262 int ot, opsize;
2263
2264 level &= 0x1f;
2265 #ifdef TARGET_X86_64
2266 if (CODE64(s)) {
2267 ot = s->dflag ? OT_QUAD : OT_WORD;
2268 opsize = 1 << ot;
2269
2270 gen_op_movl_A0_reg(R_ESP);
2271 gen_op_addq_A0_im(-opsize);
2272 gen_op_movl_T1_A0();
2273
2274 /* push bp */
2275 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2276 gen_op_st_T0_A0(ot + s->mem_index);
2277 if (level) {
2278 gen_op_enter64_level(level, (ot == OT_QUAD));
2279 }
2280 gen_op_mov_reg_T1(ot, R_EBP);
2281 gen_op_addl_T1_im(-esp_addend + (-opsize * level));
2282 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2283 } else
2284 #endif
2285 {
2286 ot = s->dflag + OT_WORD;
2287 opsize = 2 << s->dflag;
2288
2289 gen_op_movl_A0_reg(R_ESP);
2290 gen_op_addl_A0_im(-opsize);
2291 if (!s->ss32)
2292 gen_op_andl_A0_ffff();
2293 gen_op_movl_T1_A0();
2294 if (s->addseg)
2295 gen_op_addl_A0_seg(R_SS);
2296 /* push bp */
2297 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2298 gen_op_st_T0_A0(ot + s->mem_index);
2299 if (level) {
2300 gen_op_enter_level(level, s->dflag);
2301 }
2302 gen_op_mov_reg_T1(ot, R_EBP);
2303 gen_op_addl_T1_im(-esp_addend + (-opsize * level));
2304 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2305 }
2306 }
2307
2308 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2309 {
2310 if (s->cc_op != CC_OP_DYNAMIC)
2311 gen_op_set_cc_op(s->cc_op);
2312 gen_jmp_im(cur_eip);
2313 gen_op_raise_exception(trapno);
2314 s->is_jmp = 3;
2315 }
2316
2317 /* an interrupt is different from an exception because of the
2318 privilege checks */
2319 static void gen_interrupt(DisasContext *s, int intno,
2320 target_ulong cur_eip, target_ulong next_eip)
2321 {
2322 if (s->cc_op != CC_OP_DYNAMIC)
2323 gen_op_set_cc_op(s->cc_op);
2324 gen_jmp_im(cur_eip);
2325 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
2326 s->is_jmp = 3;
2327 }
2328
2329 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2330 {
2331 if (s->cc_op != CC_OP_DYNAMIC)
2332 gen_op_set_cc_op(s->cc_op);
2333 gen_jmp_im(cur_eip);
2334 gen_op_debug();
2335 s->is_jmp = 3;
2336 }
2337
2338 /* generate a generic end of block. Trace exception is also generated
2339 if needed */
2340 static void gen_eob(DisasContext *s)
2341 {
2342 if (s->cc_op != CC_OP_DYNAMIC)
2343 gen_op_set_cc_op(s->cc_op);
2344 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2345 gen_op_reset_inhibit_irq();
2346 }
2347 if (s->singlestep_enabled) {
2348 gen_op_debug();
2349 } else if (s->tf) {
2350 gen_op_single_step();
2351 } else {
2352 tcg_gen_exit_tb(0);
2353 }
2354 s->is_jmp = 3;
2355 }
2356
2357 /* generate a jump to eip. No segment change must happen before this,
2358 since the next translated block may be chained to directly */
2359 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2360 {
2361 if (s->jmp_opt) {
2362 if (s->cc_op != CC_OP_DYNAMIC) {
2363 gen_op_set_cc_op(s->cc_op);
2364 s->cc_op = CC_OP_DYNAMIC;
2365 }
2366 gen_goto_tb(s, tb_num, eip);
2367 s->is_jmp = 3;
2368 } else {
2369 gen_jmp_im(eip);
2370 gen_eob(s);
2371 }
2372 }
2373
2374 static void gen_jmp(DisasContext *s, target_ulong eip)
2375 {
2376 gen_jmp_tb(s, eip, 0);
2377 }
2378
2379 static inline void gen_ldq_env_A0(int idx, int offset)
2380 {
2381 int mem_index = (idx >> 2) - 1;
2382 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2383 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
2384 }
2385
2386 static inline void gen_stq_env_A0(int idx, int offset)
2387 {
2388 int mem_index = (idx >> 2) - 1;
2389 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2390 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2391 }
2392
2393 static inline void gen_ldo_env_A0(int idx, int offset)
2394 {
2395 int mem_index = (idx >> 2) - 1;
2396 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2397 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2398 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2399 tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2400 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2401 }
2402
2403 static inline void gen_sto_env_A0(int idx, int offset)
2404 {
2405 int mem_index = (idx >> 2) - 1;
2406 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2407 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2408 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2409 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2410 tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
2411 }
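/* note that the 128 bit (XMM) helpers above are simply two 64 bit
   guest accesses at A0 and A0 + 8; no 16 byte atomicity is implied */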
2412
2413 static inline void gen_op_movo(int d_offset, int s_offset)
2414 {
2415 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2416 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2417 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2418 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
2419 }
2420
2421 static inline void gen_op_movq(int d_offset, int s_offset)
2422 {
2423 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2424 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2425 }
2426
2427 static inline void gen_op_movl(int d_offset, int s_offset)
2428 {
2429 tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2430 tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
2431 }
2432
2433 static inline void gen_op_movq_env_0(int d_offset)
2434 {
2435 tcg_gen_movi_i64(cpu_tmp1, 0);
2436 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2437 }
2438
2439 #define SSE_SPECIAL ((void *)1)
2440 #define SSE_DUMMY ((void *)2)
2441
2442 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2443 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2444 helper_ ## x ## ss, helper_ ## x ## sd, }
2445
2446 static void *sse_op_table1[256][4] = {
2447 /* 3DNow! extensions */
2448 [0x0e] = { SSE_DUMMY }, /* femms */
2449 [0x0f] = { SSE_DUMMY }, /* pf... */
2450 /* pure SSE operations */
2451 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2452 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2453 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2454 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2455 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2456 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2457 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2458 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2459
2460 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2461 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2462 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2463 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2464 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
2465 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
2466 [0x2e] = { helper_ucomiss, helper_ucomisd },
2467 [0x2f] = { helper_comiss, helper_comisd },
2468 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2469 [0x51] = SSE_FOP(sqrt),
2470 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2471 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2472 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2473 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2474 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2475 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2476 [0x58] = SSE_FOP(add),
2477 [0x59] = SSE_FOP(mul),
2478 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2479 helper_cvtss2sd, helper_cvtsd2ss },
2480 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2481 [0x5c] = SSE_FOP(sub),
2482 [0x5d] = SSE_FOP(min),
2483 [0x5e] = SSE_FOP(div),
2484 [0x5f] = SSE_FOP(max),
2485
2486 [0xc2] = SSE_FOP(cmpeq),
2487 [0xc6] = { helper_shufps, helper_shufpd },
2488
2489 /* MMX ops and their SSE extensions */
2490 [0x60] = MMX_OP2(punpcklbw),
2491 [0x61] = MMX_OP2(punpcklwd),
2492 [0x62] = MMX_OP2(punpckldq),
2493 [0x63] = MMX_OP2(packsswb),
2494 [0x64] = MMX_OP2(pcmpgtb),
2495 [0x65] = MMX_OP2(pcmpgtw),
2496 [0x66] = MMX_OP2(pcmpgtl),
2497 [0x67] = MMX_OP2(packuswb),
2498 [0x68] = MMX_OP2(punpckhbw),
2499 [0x69] = MMX_OP2(punpckhwd),
2500 [0x6a] = MMX_OP2(punpckhdq),
2501 [0x6b] = MMX_OP2(packssdw),
2502 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2503 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2504 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2505 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2506 [0x70] = { helper_pshufw_mmx,
2507 helper_pshufd_xmm,
2508 helper_pshufhw_xmm,
2509 helper_pshuflw_xmm },
2510 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2511 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2512 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2513 [0x74] = MMX_OP2(pcmpeqb),
2514 [0x75] = MMX_OP2(pcmpeqw),
2515 [0x76] = MMX_OP2(pcmpeql),
2516 [0x77] = { SSE_DUMMY }, /* emms */
2517 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2518 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2519 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2520 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2521 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2522 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2523 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2524 [0xd1] = MMX_OP2(psrlw),
2525 [0xd2] = MMX_OP2(psrld),
2526 [0xd3] = MMX_OP2(psrlq),
2527 [0xd4] = MMX_OP2(paddq),
2528 [0xd5] = MMX_OP2(pmullw),
2529 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
2530 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2531 [0xd8] = MMX_OP2(psubusb),
2532 [0xd9] = MMX_OP2(psubusw),
2533 [0xda] = MMX_OP2(pminub),
2534 [0xdb] = MMX_OP2(pand),
2535 [0xdc] = MMX_OP2(paddusb),
2536 [0xdd] = MMX_OP2(paddusw),
2537 [0xde] = MMX_OP2(pmaxub),
2538 [0xdf] = MMX_OP2(pandn),
2539 [0xe0] = MMX_OP2(pavgb),
2540 [0xe1] = MMX_OP2(psraw),
2541 [0xe2] = MMX_OP2(psrad),
2542 [0xe3] = MMX_OP2(pavgw),
2543 [0xe4] = MMX_OP2(pmulhuw),
2544 [0xe5] = MMX_OP2(pmulhw),
2545 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2546 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2547 [0xe8] = MMX_OP2(psubsb),
2548 [0xe9] = MMX_OP2(psubsw),
2549 [0xea] = MMX_OP2(pminsw),
2550 [0xeb] = MMX_OP2(por),
2551 [0xec] = MMX_OP2(paddsb),
2552 [0xed] = MMX_OP2(paddsw),
2553 [0xee] = MMX_OP2(pmaxsw),
2554 [0xef] = MMX_OP2(pxor),
2555 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2556 [0xf1] = MMX_OP2(psllw),
2557 [0xf2] = MMX_OP2(pslld),
2558 [0xf3] = MMX_OP2(psllq),
2559 [0xf4] = MMX_OP2(pmuludq),
2560 [0xf5] = MMX_OP2(pmaddwd),
2561 [0xf6] = MMX_OP2(psadbw),
2562 [0xf7] = MMX_OP2(maskmov),
2563 [0xf8] = MMX_OP2(psubb),
2564 [0xf9] = MMX_OP2(psubw),
2565 [0xfa] = MMX_OP2(psubl),
2566 [0xfb] = MMX_OP2(psubq),
2567 [0xfc] = MMX_OP2(paddb),
2568 [0xfd] = MMX_OP2(paddw),
2569 [0xfe] = MMX_OP2(paddl),
2570 };
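/* column selection example: the second index (b1) is 0 for no
   mandatory prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2, so
   "66 0f 58" resolves to helper_addpd while a bare "0f 58" gives
   helper_addps */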
2571
2572 static void *sse_op_table2[3 * 8][2] = {
2573 [0 + 2] = MMX_OP2(psrlw),
2574 [0 + 4] = MMX_OP2(psraw),
2575 [0 + 6] = MMX_OP2(psllw),
2576 [8 + 2] = MMX_OP2(psrld),
2577 [8 + 4] = MMX_OP2(psrad),
2578 [8 + 6] = MMX_OP2(pslld),
2579 [16 + 2] = MMX_OP2(psrlq),
2580 [16 + 3] = { NULL, helper_psrldq_xmm },
2581 [16 + 6] = MMX_OP2(psllq),
2582 [16 + 7] = { NULL, helper_pslldq_xmm },
2583 };
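/* row selection example: the immediate shift groups 0x71/0x72/0x73
   map to rows 0/8/16 via ((b - 1) & 3) * 8, the modrm /reg field
   picks the entry and b1 selects the MMX or XMM helper, so
   "0f 71 /2 ib" lands on psrlw for MMX */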
2584
2585 static void *sse_op_table3[4 * 3] = {
2586 helper_cvtsi2ss,
2587 helper_cvtsi2sd,
2588 X86_64_ONLY(helper_cvtsq2ss),
2589 X86_64_ONLY(helper_cvtsq2sd),
2590
2591 helper_cvttss2si,
2592 helper_cvttsd2si,
2593 X86_64_ONLY(helper_cvttss2sq),
2594 X86_64_ONLY(helper_cvttsd2sq),
2595
2596 helper_cvtss2si,
2597 helper_cvtsd2si,
2598 X86_64_ONLY(helper_cvtss2sq),
2599 X86_64_ONLY(helper_cvtsd2sq),
2600 };
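/* index layout: entries come in groups of four (ss, sd, 64 bit ss,
   64 bit sd); group 0 holds cvtsi2*, group 1 (+4) the truncating
   cvtt*2si and group 2 (+8) cvt*2si, e.g. "f2 0f 2d" with 32 bit
   operand size selects helper_cvtsd2si (index 9) */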
2601
2602 static void *sse_op_table4[8][4] = {
2603 SSE_FOP(cmpeq),
2604 SSE_FOP(cmplt),
2605 SSE_FOP(cmple),
2606 SSE_FOP(cmpunord),
2607 SSE_FOP(cmpneq),
2608 SSE_FOP(cmpnlt),
2609 SSE_FOP(cmpnle),
2610 SSE_FOP(cmpord),
2611 };
2612
2613 static void *sse_op_table5[256] = {
2614 [0x0c] = helper_pi2fw,
2615 [0x0d] = helper_pi2fd,
2616 [0x1c] = helper_pf2iw,
2617 [0x1d] = helper_pf2id,
2618 [0x8a] = helper_pfnacc,
2619 [0x8e] = helper_pfpnacc,
2620 [0x90] = helper_pfcmpge,
2621 [0x94] = helper_pfmin,
2622 [0x96] = helper_pfrcp,
2623 [0x97] = helper_pfrsqrt,
2624 [0x9a] = helper_pfsub,
2625 [0x9e] = helper_pfadd,
2626 [0xa0] = helper_pfcmpgt,
2627 [0xa4] = helper_pfmax,
2628 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2629 [0xa7] = helper_movq, /* pfrsqit1 */
2630 [0xaa] = helper_pfsubr,
2631 [0xae] = helper_pfacc,
2632 [0xb0] = helper_pfcmpeq,
2633 [0xb4] = helper_pfmul,
2634 [0xb6] = helper_movq, /* pfrcpit2 */
2635 [0xb7] = helper_pmulhrw_mmx,
2636 [0xbb] = helper_pswapd,
2637 [0xbf] = helper_pavgb_mmx /* pavgusb */
2638 };
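/* indexed by the 8 bit 3DNow! suffix that follows the modrm byte
   (opcode form "0f 0f /r ib"), e.g. suffix 0x9e selects helper_pfadd */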
2639
2640 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2641 {
2642 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2643 int modrm, mod, rm, reg, reg_addr, offset_addr;
2644 void *sse_op2;
2645
2646 b &= 0xff;
2647 if (s->prefix & PREFIX_DATA)
2648 b1 = 1;
2649 else if (s->prefix & PREFIX_REPZ)
2650 b1 = 2;
2651 else if (s->prefix & PREFIX_REPNZ)
2652 b1 = 3;
2653 else
2654 b1 = 0;
2655 sse_op2 = sse_op_table1[b][b1];
2656 if (!sse_op2)
2657 goto illegal_op;
2658 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2659 is_xmm = 1;
2660 } else {
2661 if (b1 == 0) {
2662 /* MMX case */
2663 is_xmm = 0;
2664 } else {
2665 is_xmm = 1;
2666 }
2667 }
2668 /* simple MMX/SSE operation */
2669 if (s->flags & HF_TS_MASK) {
2670 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2671 return;
2672 }
2673 if (s->flags & HF_EM_MASK) {
2674 illegal_op:
2675 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2676 return;
2677 }
2678 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2679 goto illegal_op;
2680 if (b == 0x0e) {
2681 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2682 goto illegal_op;
2683 /* femms */
2684 tcg_gen_helper_0_0(helper_emms);
2685 return;
2686 }
2687 if (b == 0x77) {
2688 /* emms */
2689 tcg_gen_helper_0_0(helper_emms);
2690 return;
2691 }
2692 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2693 the static cpu state) */
2694 if (!is_xmm) {
2695 tcg_gen_helper_0_0(helper_enter_mmx);
2696 }
2697
2698 modrm = ldub_code(s->pc++);
2699 reg = ((modrm >> 3) & 7);
2700 if (is_xmm)
2701 reg |= rex_r;
2702 mod = (modrm >> 6) & 3;
2703 if (sse_op2 == SSE_SPECIAL) {
2704 b |= (b1 << 8);
2705 switch(b) {
2706 case 0x0e7: /* movntq */
2707 if (mod == 3)
2708 goto illegal_op;
2709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2710 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2711 break;
2712 case 0x1e7: /* movntdq */
2713 case 0x02b: /* movntps */
2714 case 0x12b: /* movntpd */
2716 if (mod == 3)
2717 goto illegal_op;
2718 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2719 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2720 break;
case 0x3f0: /* lddqu: a 128 bit load, unlike the movnt stores above */
if (mod == 3)
goto illegal_op;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
break;
2721 case 0x6e: /* movd mm, ea */
2722 #ifdef TARGET_X86_64
2723 if (s->dflag == 2) {
2724 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2725 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2726 } else
2727 #endif
2728 {
2729 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2730 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2731 offsetof(CPUX86State,fpregs[reg].mmx));
2732 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2733 }
2734 break;
2735 case 0x16e: /* movd xmm, ea */
2736 #ifdef TARGET_X86_64
2737 if (s->dflag == 2) {
2738 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2739 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2740 offsetof(CPUX86State,xmm_regs[reg]));
2741 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2742 } else
2743 #endif
2744 {
2745 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2746 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2747 offsetof(CPUX86State,xmm_regs[reg]));
2748 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2749 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2750 }
2751 break;
2752 case 0x6f: /* movq mm, ea */
2753 if (mod != 3) {
2754 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2755 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2756 } else {
2757 rm = (modrm & 7);
2758 tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2759 offsetof(CPUX86State,fpregs[rm].mmx));
2760 tcg_gen_st_i64(cpu_tmp1, cpu_env,
2761 offsetof(CPUX86State,fpregs[reg].mmx));
2762 }
2763 break;
2764 case 0x010: /* movups */
2765 case 0x110: /* movupd */
2766 case 0x028: /* movaps */
2767 case 0x128: /* movapd */
2768 case 0x16f: /* movdqa xmm, ea */
2769 case 0x26f: /* movdqu xmm, ea */
2770 if (mod != 3) {
2771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2772 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2773 } else {
2774 rm = (modrm & 7) | REX_B(s);
2775 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2776 offsetof(CPUX86State,xmm_regs[rm]));
2777 }
2778 break;
2779 case 0x210: /* movss xmm, ea */
2780 if (mod != 3) {
2781 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2782 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2783 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2784 gen_op_movl_T0_0();
2785 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2786 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2787 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2788 } else {
2789 rm = (modrm & 7) | REX_B(s);
2790 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2791 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2792 }
2793 break;
2794 case 0x310: /* movsd xmm, ea */
2795 if (mod != 3) {
2796 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2797 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2798 gen_op_movl_T0_0();
2799 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2800 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2801 } else {
2802 rm = (modrm & 7) | REX_B(s);
2803 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2804 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2805 }
2806 break;
2807 case 0x012: /* movlps */
2808 case 0x112: /* movlpd */
2809 if (mod != 3) {
2810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2811 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2812 } else {
2813 /* movhlps */
2814 rm = (modrm & 7) | REX_B(s);
2815 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2816 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2817 }
2818 break;
2819 case 0x212: /* movsldup */
2820 if (mod != 3) {
2821 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2822 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2823 } else {
2824 rm = (modrm & 7) | REX_B(s);
2825 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2826 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2827 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2828 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2829 }
2830 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2831 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2832 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2833 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2834 break;
2835 case 0x312: /* movddup */
2836 if (mod != 3) {
2837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2838 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2839 } else {
2840 rm = (modrm & 7) | REX_B(s);
2841 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2842 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2843 }
2844 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2845 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2846 break;
2847 case 0x016: /* movhps */
2848 case 0x116: /* movhpd */
2849 if (mod != 3) {
2850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2851 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2852 } else {
2853 /* movlhps */
2854 rm = (modrm & 7) | REX_B(s);
2855 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2856 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2857 }
2858 break;
2859 case 0x216: /* movshdup */
2860 if (mod != 3) {
2861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2862 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2863 } else {
2864 rm = (modrm & 7) | REX_B(s);
2865 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2866 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2867 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2868 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2869 }
2870 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2871 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2872 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2873 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2874 break;
2875 case 0x7e: /* movd ea, mm */
2876 #ifdef TARGET_X86_64
2877 if (s->dflag == 2) {
2878 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2879 offsetof(CPUX86State,fpregs[reg].mmx));
2880 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2881 } else
2882 #endif
2883 {
2884 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2885 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2886 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2887 }
2888 break;
2889 case 0x17e: /* movd ea, xmm */
2890 #ifdef TARGET_X86_64
2891 if (s->dflag == 2) {
2892 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2893 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2894 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2895 } else
2896 #endif
2897 {
2898 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2899 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2900 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2901 }
2902 break;
2903 case 0x27e: /* movq xmm, ea */
2904 if (mod != 3) {
2905 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2906 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2907 } else {
2908 rm = (modrm & 7) | REX_B(s);
2909 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2910 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2911 }
2912 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2913 break;
2914 case 0x7f: /* movq ea, mm */
2915 if (mod != 3) {
2916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2917 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2918 } else {
2919 rm = (modrm & 7);
2920 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2921 offsetof(CPUX86State,fpregs[reg].mmx));
2922 }
2923 break;
2924 case 0x011: /* movups */
2925 case 0x111: /* movupd */
2926 case 0x029: /* movaps */
2927 case 0x129: /* movapd */
2928 case 0x17f: /* movdqa ea, xmm */
2929 case 0x27f: /* movdqu ea, xmm */
2930 if (mod != 3) {
2931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2932 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2933 } else {
2934 rm = (modrm & 7) | REX_B(s);
2935 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2936 offsetof(CPUX86State,xmm_regs[reg]));
2937 }
2938 break;
2939 case 0x211: /* movss ea, xmm */
2940 if (mod != 3) {
2941 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2942 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2943 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2944 } else {
2945 rm = (modrm & 7) | REX_B(s);
2946 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2947 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2948 }
2949 break;
2950 case 0x311: /* movsd ea, xmm */
2951 if (mod != 3) {
2952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2953 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2954 } else {
2955 rm = (modrm & 7) | REX_B(s);
2956 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2957 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2958 }
2959 break;
2960 case 0x013: /* movlps */
2961 case 0x113: /* movlpd */
2962 if (mod != 3) {
2963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2964 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2965 } else {
2966 goto illegal_op;
2967 }
2968 break;
2969 case 0x017: /* movhps */
2970 case 0x117: /* movhpd */
2971 if (mod != 3) {
2972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2973 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2974 } else {
2975 goto illegal_op;
2976 }
2977 break;
2978 case 0x71: /* shift mm, im */
2979 case 0x72:
2980 case 0x73:
2981 case 0x171: /* shift xmm, im */
2982 case 0x172:
2983 case 0x173:
2984 val = ldub_code(s->pc++);
2985 if (is_xmm) {
2986 gen_op_movl_T0_im(val);
2987 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2988 gen_op_movl_T0_0();
2989 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2990 op1_offset = offsetof(CPUX86State,xmm_t0);
2991 } else {
2992 gen_op_movl_T0_im(val);
2993 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2994 gen_op_movl_T0_0();
2995 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2996 op1_offset = offsetof(CPUX86State,mmx_t0);
2997 }
2998 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
2999 if (!sse_op2)
3000 goto illegal_op;
3001 if (is_xmm) {
3002 rm = (modrm & 7) | REX_B(s);
3003 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3004 } else {
3005 rm = (modrm & 7);
3006 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3007 }
3008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3009 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3010 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3011 break;
3012 case 0x050: /* movmskps */
3013 rm = (modrm & 7) | REX_B(s);
3014 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3015 offsetof(CPUX86State,xmm_regs[rm]));
3016 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3017 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3018 gen_op_mov_reg_T0(OT_LONG, reg);
3019 break;
3020 case 0x150: /* movmskpd */
3021 rm = (modrm & 7) | REX_B(s);
3022 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3023 offsetof(CPUX86State,xmm_regs[rm]));
3024 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3025 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3026 gen_op_mov_reg_T0(OT_LONG, reg);
3027 break;
3028 case 0x02a: /* cvtpi2ps */
3029 case 0x12a: /* cvtpi2pd */
3030 tcg_gen_helper_0_0(helper_enter_mmx);
3031 if (mod != 3) {
3032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3033 op2_offset = offsetof(CPUX86State,mmx_t0);
3034 gen_ldq_env_A0(s->mem_index, op2_offset);
3035 } else {
3036 rm = (modrm & 7);
3037 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3038 }
3039 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3040 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3041 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3042 switch(b >> 8) {
3043 case 0x0:
3044 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3045 break;
3046 default:
3047 case 0x1:
3048 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3049 break;
3050 }
3051 break;
3052 case 0x22a: /* cvtsi2ss */
3053 case 0x32a: /* cvtsi2sd */
3054 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3055 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3056 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3057 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3058 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3059 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3060 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3061 break;
3062 case 0x02c: /* cvttps2pi */
3063 case 0x12c: /* cvttpd2pi */
3064 case 0x02d: /* cvtps2pi */
3065 case 0x12d: /* cvtpd2pi */
3066 tcg_gen_helper_0_0(helper_enter_mmx);
3067 if (mod != 3) {
3068 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3069 op2_offset = offsetof(CPUX86State,xmm_t0);
3070 gen_ldo_env_A0(s->mem_index, op2_offset);
3071 } else {
3072 rm = (modrm & 7) | REX_B(s);
3073 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3074 }
3075 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3076 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3077 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3078 switch(b) {
3079 case 0x02c:
3080 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3081 break;
3082 case 0x12c:
3083 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3084 break;
3085 case 0x02d:
3086 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3087 break;
3088 case 0x12d:
3089 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3090 break;
3091 }
3092 break;
3093 case 0x22c: /* cvttss2si */
3094 case 0x32c: /* cvttsd2si */
3095 case 0x22d: /* cvtss2si */
3096 case 0x32d: /* cvtsd2si */
3097 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3098 if (mod != 3) {
3099 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100 if ((b >> 8) & 1) {
3101 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3102 } else {
3103 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3104 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3105 }
3106 op2_offset = offsetof(CPUX86State,xmm_t0);
3107 } else {
3108 rm = (modrm & 7) | REX_B(s);
3109 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3110 }
3111 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3112 (b & 1) * 4];
3113 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3114 if (ot == OT_LONG) {
3115 tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3116 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3117 } else {
3118 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3119 }
3120 gen_op_mov_reg_T0(ot, reg);
3121 break;
3122 case 0xc4: /* pinsrw */
3123 case 0x1c4:
3124 s->rip_offset = 1;
3125 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3126 val = ldub_code(s->pc++);
3127 if (b1) {
3128 val &= 7;
3129 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3130 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3131 } else {
3132 val &= 3;
3133 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3134 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3135 }
3136 break;
3137 case 0xc5: /* pextrw */
3138 case 0x1c5:
3139 if (mod != 3)
3140 goto illegal_op;
3141 val = ldub_code(s->pc++);
3142 if (b1) {
3143 val &= 7;
3144 rm = (modrm & 7) | REX_B(s);
3145 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3146 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3147 } else {
3148 val &= 3;
3149 rm = (modrm & 7);
3150 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3151 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3152 }
3153 reg = ((modrm >> 3) & 7) | rex_r;
3154 gen_op_mov_reg_T0(OT_LONG, reg);
3155 break;
3156 case 0x1d6: /* movq ea, xmm */
3157 if (mod != 3) {
3158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3159 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3160 } else {
3161 rm = (modrm & 7) | REX_B(s);
3162 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3163 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3164 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3165 }
3166 break;
3167 case 0x2d6: /* movq2dq */
3168 tcg_gen_helper_0_0(helper_enter_mmx);
3169 rm = (modrm & 7);
3170 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3171 offsetof(CPUX86State,fpregs[rm].mmx));
3172 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3173 break;
3174 case 0x3d6: /* movdq2q */
3175 tcg_gen_helper_0_0(helper_enter_mmx);
3176 rm = (modrm & 7) | REX_B(s);
3177 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3178 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3179 break;
3180 case 0xd7: /* pmovmskb */
3181 case 0x1d7:
3182 if (mod != 3)
3183 goto illegal_op;
3184 if (b1) {
3185 rm = (modrm & 7) | REX_B(s);
3186 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3187 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3188 } else {
3189 rm = (modrm & 7);
3190 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3191 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3192 }
3193 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3194 reg = ((modrm >> 3) & 7) | rex_r;
3195 gen_op_mov_reg_T0(OT_LONG, reg);
3196 break;
3197 default:
3198 goto illegal_op;
3199 }
3200 } else {
3201 /* generic MMX or SSE operation */
3202 switch(b) {
3203 case 0xf7:
3204 /* maskmov: A0 must be prepared here, since the store target is implicitly DS:rDI */
3205 if (mod != 3)
3206 goto illegal_op;
3207 #ifdef TARGET_X86_64
3208 if (s->aflag == 2) {
3209 gen_op_movq_A0_reg(R_EDI);
3210 } else
3211 #endif
3212 {
3213 gen_op_movl_A0_reg(R_EDI);
3214 if (s->aflag == 0)
3215 gen_op_andl_A0_ffff();
3216 }
3217 gen_add_A0_ds_seg(s);
3218 break;
3219 case 0x70: /* pshufx insn */
3220 case 0xc6: /* pshufx insn */
3221 case 0xc2: /* compare insns */
3222 s->rip_offset = 1;
3223 break;
3224 default:
3225 break;
3226 }
3227 if (is_xmm) {
3228 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3229 if (mod != 3) {
3230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3231 op2_offset = offsetof(CPUX86State,xmm_t0);
3232 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3233 b == 0xc2)) {
3234 /* specific case for SSE single instructions */
3235 if (b1 == 2) {
3236 /* 32 bit access */
3237 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3238 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3239 } else {
3240 /* 64 bit access */
3241 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3242 }
3243 } else {
3244 gen_ldo_env_A0(s->mem_index, op2_offset);
3245 }
3246 } else {
3247 rm = (modrm & 7) | REX_B(s);
3248 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3249 }
3250 } else {
3251 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3252 if (mod != 3) {
3253 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3254 op2_offset = offsetof(CPUX86State,mmx_t0);
3255 gen_ldq_env_A0(s->mem_index, op2_offset);
3256 } else {
3257 rm = (modrm & 7);
3258 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3259 }
3260 }
3261 switch(b) {
3262 case 0x0f: /* 3DNow! data insns */
3263 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3264 goto illegal_op;
3265 val = ldub_code(s->pc++);
3266 sse_op2 = sse_op_table5[val];
3267 if (!sse_op2)
3268 goto illegal_op;
3269 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3270 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3271 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3272 break;
3273 case 0x70: /* pshufx insn */
3274 case 0xc6: /* pshufx insn */
3275 val = ldub_code(s->pc++);
3276 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3277 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3278 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3279 break;
3280 case 0xc2:
3281 /* compare insns */
3282 val = ldub_code(s->pc++);
3283 if (val >= 8)
3284 goto illegal_op;
3285 sse_op2 = sse_op_table4[val][b1];
3286 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3287 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3289 break;
3290 default:
3291 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3292 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3293 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3294 break;
3295 }
3296 if (b == 0x2e || b == 0x2f) {
3297 /* just to keep the EFLAGS optimization correct */
3298 gen_op_com_dummy();
3299 s->cc_op = CC_OP_EFLAGS;
3300 }
3301 }
3302 }
3303
3304 /* convert one instruction. s->is_jmp is set if the translation must
3305 be stopped. Return the next pc value */
3306 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3307 {
3308 int b, prefixes, aflag, dflag;
3309 int shift, ot;
3310 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3311 target_ulong next_eip, tval;
3312 int rex_w, rex_r;
3313
3314 s->pc = pc_start;
3315 prefixes = 0;
3316 aflag = s->code32;
3317 dflag = s->code32;
3318 s->override = -1;
3319 rex_w = -1;
3320 rex_r = 0;
3321 #ifdef TARGET_X86_64
3322 s->rex_x = 0;
3323 s->rex_b = 0;
3324 x86_64_hregs = 0;
3325 #endif
3326 s->rip_offset = 0; /* for relative ip address */
3327 next_byte:
3328 b = ldub_code(s->pc);
3329 s->pc++;
3330 /* check prefixes */
3331 #ifdef TARGET_X86_64
3332 if (CODE64(s)) {
3333 switch (b) {
3334 case 0xf3:
3335 prefixes |= PREFIX_REPZ;
3336 goto next_byte;
3337 case 0xf2:
3338 prefixes |= PREFIX_REPNZ;
3339 goto next_byte;
3340 case 0xf0:
3341 prefixes |= PREFIX_LOCK;
3342 goto next_byte;
3343 case 0x2e:
3344 s->override = R_CS;
3345 goto next_byte;
3346 case 0x36:
3347 s->override = R_SS;
3348 goto next_byte;
3349 case 0x3e:
3350 s->override = R_DS;
3351 goto next_byte;
3352 case 0x26:
3353 s->override = R_ES;
3354 goto next_byte;
3355 case 0x64:
3356 s->override = R_FS;
3357 goto next_byte;
3358 case 0x65:
3359 s->override = R_GS;
3360 goto next_byte;
3361 case 0x66:
3362 prefixes |= PREFIX_DATA;
3363 goto next_byte;
3364 case 0x67:
3365 prefixes |= PREFIX_ADR;
3366 goto next_byte;
3367 case 0x40 ... 0x4f:
3368 /* REX prefix */
3369 rex_w = (b >> 3) & 1;
3370 rex_r = (b & 0x4) << 1;
3371 s->rex_x = (b & 0x2) << 2;
3372 REX_B(s) = (b & 0x1) << 3;
3373 x86_64_hregs = 1; /* select uniform byte register addressing */
3374 goto next_byte;
3375 }
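/* REX decode example: 0x4c (REX.WR) yields rex_w = 1 and rex_r = 8;
   the R/X/B bits are pre-shifted to bit 3 so that they can simply be
   or-ed into the 3 bit reg/index/base fields of the modrm and sib
   bytes */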
3376 if (rex_w == 1) {
3377 /* 0x66 is ignored if rex.w is set */
3378 dflag = 2;
3379 } else {
3380 if (prefixes & PREFIX_DATA)
3381 dflag ^= 1;
3382 }
3383 if (!(prefixes & PREFIX_ADR))
3384 aflag = 2;
3385 } else
3386 #endif
3387 {
3388 switch (b) {
3389 case 0xf3:
3390 prefixes |= PREFIX_REPZ;
3391 goto next_byte;
3392 case 0xf2:
3393 prefixes |= PREFIX_REPNZ;
3394 goto next_byte;
3395 case 0xf0:
3396 prefixes |= PREFIX_LOCK;
3397 goto next_byte;
3398 case 0x2e:
3399 s->override = R_CS;
3400 goto next_byte;
3401 case 0x36:
3402 s->override = R_SS;
3403 goto next_byte;
3404 case 0x3e:
3405 s->override = R_DS;
3406 goto next_byte;
3407 case 0x26:
3408 s->override = R_ES;
3409 goto next_byte;
3410 case 0x64:
3411 s->override = R_FS;
3412 goto next_byte;
3413 case 0x65:
3414 s->override = R_GS;
3415 goto next_byte;
3416 case 0x66:
3417 prefixes |= PREFIX_DATA;
3418 goto next_byte;
3419 case 0x67:
3420 prefixes |= PREFIX_ADR;
3421 goto next_byte;
3422 }
3423 if (prefixes & PREFIX_DATA)
3424 dflag ^= 1;
3425 if (prefixes & PREFIX_ADR)
3426 aflag ^= 1;
3427 }
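/* e.g. in a 32 bit code segment a 0x66 prefix flips dflag to 0
   (16 bit operands) and 0x67 flips aflag to 0; in 64 bit code the
   address size defaults to 64 bits and 0x67 selects 32 bit
   addressing instead */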
3428
3429 s->prefix = prefixes;
3430 s->aflag = aflag;
3431 s->dflag = dflag;
3432
3433 /* lock generation */
3434 if (prefixes & PREFIX_LOCK)
3435 gen_op_lock();
3436
3437 /* now check op code */
3438 reswitch:
3439 switch(b) {
3440 case 0x0f:
3441 /**************************/
3442 /* extended op code */
3443 b = ldub_code(s->pc++) | 0x100;
3444 goto reswitch;
3445
3446 /**************************/
3447 /* arith & logic */
3448 case 0x00 ... 0x05:
3449 case 0x08 ... 0x0d:
3450 case 0x10 ... 0x15:
3451 case 0x18 ... 0x1d:
3452 case 0x20 ... 0x25:
3453 case 0x28 ... 0x2d:
3454 case 0x30 ... 0x35:
3455 case 0x38 ... 0x3d:
3456 {
3457 int op, f, val;
3458 op = (b >> 3) & 7;
3459 f = (b >> 1) & 3;
3460
3461 if ((b & 1) == 0)
3462 ot = OT_BYTE;
3463 else
3464 ot = dflag + OT_WORD;
3465
3466 switch(f) {
3467 case 0: /* OP Ev, Gv */
3468 modrm = ldub_code(s->pc++);
3469 reg = ((modrm >> 3) & 7) | rex_r;
3470 mod = (modrm >> 6) & 3;
3471 rm = (modrm & 7) | REX_B(s);
3472 if (mod != 3) {
3473 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3474 opreg = OR_TMP0;
3475 } else if (op == OP_XORL && rm == reg) {
3476 xor_zero:
3477 /* xor reg, reg optimisation */
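/* e.g. the common "31 c0" (xor %eax,%eax) idiom: the result does not
   depend on the old value, so the register is simply zeroed and the
   flags are computed as for a logic op */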
3478 gen_op_movl_T0_0();
3479 s->cc_op = CC_OP_LOGICB + ot;
3480 gen_op_mov_reg_T0(ot, reg);
3481 gen_op_update1_cc();
3482 break;
3483 } else {
3484 opreg = rm;
3485 }
3486 gen_op_mov_TN_reg(ot, 1, reg);
3487 gen_op(s, op, ot, opreg);
3488 break;
3489 case 1: /* OP Gv, Ev */
3490 modrm = ldub_code(s->pc++);
3491 mod = (modrm >> 6) & 3;
3492 reg = ((modrm >> 3) & 7) | rex_r;
3493 rm = (modrm & 7) | REX_B(s);
3494 if (mod != 3) {
3495 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3496 gen_op_ld_T1_A0(ot + s->mem_index);
3497 } else if (op == OP_XORL && rm == reg) {
3498 goto xor_zero;
3499 } else {
3500 gen_op_mov_TN_reg(ot, 1, rm);
3501 }
3502 gen_op(s, op, ot, reg);
3503 break;
3504 case 2: /* OP A, Iv */
3505 val = insn_get(s, ot);
3506 gen_op_movl_T1_im(val);
3507 gen_op(s, op, ot, OR_EAX);
3508 break;
3509 }
3510 }
3511 break;
3512
3513 case 0x80: /* GRP1 */
3514 case 0x81:
3515 case 0x82:
3516 case 0x83:
3517 {
3518 int val;
3519
3520 if ((b & 1) == 0)
3521 ot = OT_BYTE;
3522 else
3523 ot = dflag + OT_WORD;
3524
3525 modrm = ldub_code(s->pc++);
3526 mod = (modrm >> 6) & 3;
3527 rm = (modrm & 7) | REX_B(s);
3528 op = (modrm >> 3) & 7;
3529
3530 if (mod != 3) {
3531 if (b == 0x83)
3532 s->rip_offset = 1;
3533 else
3534 s->rip_offset = insn_const_size(ot);
3535 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3536 opreg = OR_TMP0;
3537 } else {
3538 opreg = rm;
3539 }
3540
3541 switch(b) {
3542 default:
3543 case 0x80:
3544 case 0x81:
3545 case 0x82:
3546 val = insn_get(s, ot);
3547 break;
3548 case 0x83:
3549 val = (int8_t)insn_get(s, OT_BYTE);
3550 break;
3551 }
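/* e.g. "83 c0 f0" is add $-16, %eax: the single immediate byte has
   just been sign-extended by the (int8_t) cast above */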
3552 gen_op_movl_T1_im(val);
3553 gen_op(s, op, ot, opreg);
3554 }
3555 break;
3556
3557 /**************************/
3558 /* inc, dec, and other misc arith */
3559 case 0x40 ... 0x47: /* inc Gv */
3560 ot = dflag ? OT_LONG : OT_WORD;
3561 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3562 break;
3563 case 0x48 ... 0x4f: /* dec Gv */
3564 ot = dflag ? OT_LONG : OT_WORD;
3565 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3566 break;
3567 case 0xf6: /* GRP3 */
3568 case 0xf7:
3569 if ((b & 1) == 0)
3570 ot = OT_BYTE;
3571 else
3572 ot = dflag + OT_WORD;
3573
3574 modrm = ldub_code(s->pc++);
3575 mod = (modrm >> 6) & 3;
3576 rm = (modrm & 7) | REX_B(s);
3577 op = (modrm >> 3) & 7;
3578 if (mod != 3) {
3579 if (op == 0)
3580 s->rip_offset = insn_const_size(ot);
3581 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3582 gen_op_ld_T0_A0(ot + s->mem_index);
3583 } else {
3584 gen_op_mov_TN_reg(ot, 0, rm);
3585 }
3586
3587 switch(op) {
3588 case 0: /* test */
3589 val = insn_get(s, ot);
3590 gen_op_movl_T1_im(val);
3591 gen_op_testl_T0_T1_cc();
3592 s->cc_op = CC_OP_LOGICB + ot;
3593 break;
3594 case 2: /* not */
3595 gen_op_notl_T0();
3596 if (mod != 3) {
3597 gen_op_st_T0_A0(ot + s->mem_index);
3598 } else {
3599 gen_op_mov_reg_T0(ot, rm);
3600 }
3601 break;
3602 case 3: /* neg */
3603 gen_op_negl_T0();
3604 if (mod != 3) {
3605 gen_op_st_T0_A0(ot + s->mem_index);
3606 } else {
3607 gen_op_mov_reg_T0(ot, rm);
3608 }
3609 gen_op_update_neg_cc();
3610 s->cc_op = CC_OP_SUBB + ot;
3611 break;
3612 case 4: /* mul */
3613 switch(ot) {
3614 case OT_BYTE:
3615 gen_op_mulb_AL_T0();
3616 s->cc_op = CC_OP_MULB;
3617 break;
3618 case OT_WORD:
3619 gen_op_mulw_AX_T0();
3620 s->cc_op = CC_OP_MULW;
3621 break;
3622 default:
3623 case OT_LONG:
3624 gen_op_mull_EAX_T0();
3625 s->cc_op = CC_OP_MULL;
3626 break;
3627 #ifdef TARGET_X86_64
3628 case OT_QUAD:
3629 gen_op_mulq_EAX_T0();
3630 s->cc_op = CC_OP_MULQ;
3631 break;
3632 #endif
3633 }
3634 break;
3635 case 5: /* imul */
3636 switch(ot) {
3637 case OT_BYTE:
3638 gen_op_imulb_AL_T0();
3639 s->cc_op = CC_OP_MULB;
3640 break;
3641 case OT_WORD:
3642 gen_op_imulw_AX_T0();
3643 s->cc_op = CC_OP_MULW;
3644 break;
3645 default:
3646 case OT_LONG:
3647 gen_op_imull_EAX_T0();
3648 s->cc_op = CC_OP_MULL;
3649 break;
3650 #ifdef TARGET_X86_64
3651 case OT_QUAD:
3652 gen_op_imulq_EAX_T0();
3653 s->cc_op = CC_OP_MULQ;
3654 break;
3655 #endif
3656 }
3657 break;
3658 case 6: /* div */
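/* eip is written back before each division so that a #DE exception
   raised inside the helper reports the faulting instruction (the
   same holds for idiv below) */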
3659 switch(ot) {
3660 case OT_BYTE:
3661 gen_jmp_im(pc_start - s->cs_base);
3662 gen_op_divb_AL_T0();
3663 break;
3664 case OT_WORD:
3665 gen_jmp_im(pc_start - s->cs_base);
3666 gen_op_divw_AX_T0();
3667 break;
3668 default:
3669 case OT_LONG:
3670 gen_jmp_im(pc_start - s->cs_base);
3671 #ifdef MACRO_TEST
3672 /* XXX: this is just a test */
3673 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3674 #else
3675 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3676 #endif
3677 break;
3678 #ifdef TARGET_X86_64
3679 case OT_QUAD:
3680 gen_jmp_im(pc_start - s->cs_base);
3681 gen_op_divq_EAX_T0();
3682 break;
3683 #endif
3684 }
3685 break;
3686 case 7: /* idiv */
3687 switch(ot) {
3688 case OT_BYTE:
3689 gen_jmp_im(pc_start - s->cs_base);
3690 gen_op_idivb_AL_T0();
3691 break;
3692 case OT_WORD:
3693 gen_jmp_im(pc_start - s->cs_base);
3694 gen_op_idivw_AX_T0();
3695 break;
3696 default:
3697 case OT_LONG:
3698 gen_jmp_im(pc_start - s->cs_base);
3699 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3700 break;
3701 #ifdef TARGET_X86_64
3702 case OT_QUAD:
3703 gen_jmp_im(pc_start - s->cs_base);
3704 gen_op_idivq_EAX_T0();
3705 break;
3706 #endif
3707 }
3708 break;
3709 default:
3710 goto illegal_op;
3711 }
3712 break;
3713
3714 case 0xfe: /* GRP4 */
3715 case 0xff: /* GRP5 */
3716 if ((b & 1) == 0)
3717 ot = OT_BYTE;
3718 else
3719 ot = dflag + OT_WORD;
3720
3721 modrm = ldub_code(s->pc++);
3722 mod = (modrm >> 6) & 3;
3723 rm = (modrm & 7) | REX_B(s);
3724 op = (modrm >> 3) & 7;
3725 if (op >= 2 && b == 0xfe) {
3726 goto illegal_op;
3727 }
3728 if (CODE64(s)) {
3729 if (op == 2 || op == 4) {
3730 /* operand size for jumps is 64 bit */
3731 ot = OT_QUAD;
3732 } else if (op == 3 || op == 5) {
3733 /* for lcall/ljmp, the operand is 16 or 32 bit, even
3734 in long mode */
3735 ot = dflag ? OT_LONG : OT_WORD;
3736 } else if (op == 6) {
3737 /* default push size is 64 bit */
3738 ot = dflag ? OT_QUAD : OT_WORD;
3739 }
3740 }
3741 if (mod != 3) {
3742 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3743 if (op >= 2 && op != 3 && op != 5)
3744 gen_op_ld_T0_A0(ot + s->mem_index);
3745 } else {
3746 gen_op_mov_TN_reg(ot, 0, rm);
3747 }
3748
3749 switch(op) {
3750 case 0: /* inc Ev */
3751 if (mod != 3)
3752 opreg = OR_TMP0;
3753 else
3754 opreg = rm;
3755 gen_inc(s, ot, opreg, 1);
3756 break;
3757 case 1: /* dec Ev */
3758 if (mod != 3)
3759 opreg = OR_TMP0;
3760 else
3761 opreg = rm;
3762 gen_inc(s, ot, opreg, -1);
3763 break;
3764 case 2: /* call Ev */
3765 /* XXX: optimize if memory (no 'and' is necessary) */
3766 if (s->dflag == 0)
3767 gen_op_andl_T0_ffff();
3768 next_eip = s->pc - s->cs_base;
3769 gen_movtl_T1_im(next_eip);
3770 gen_push_T1(s);
3771 gen_op_jmp_T0();
3772 gen_eob(s);
3773 break;
3774 case 3: /* lcall Ev */
3775 gen_op_ld_T1_A0(ot + s->mem_index);
3776 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3777 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3778 do_lcall:
3779 if (s->pe && !s->vm86) {
3780 if (s->cc_op != CC_OP_DYNAMIC)
3781 gen_op_set_cc_op(s->cc_op);
3782 gen_jmp_im(pc_start - s->cs_base);
3783 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3784 } else {
3785 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3786 }
3787 gen_eob(s);
3788 break;
3789 case 4: /* jmp Ev */
3790 if (s->dflag == 0)
3791 gen_op_andl_T0_ffff();
3792 gen_op_jmp_T0();
3793 gen_eob(s);
3794 break;
3795 case 5: /* ljmp Ev */
3796 gen_op_ld_T1_A0(ot + s->mem_index);
3797 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3798 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3799 do_ljmp:
3800 if (s->pe && !s->vm86) {
3801 if (s->cc_op != CC_OP_DYNAMIC)
3802 gen_op_set_cc_op(s->cc_op);
3803 gen_jmp_im(pc_start - s->cs_base);
3804 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3805 } else {
3806 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3807 gen_op_movl_T0_T1();
3808 gen_op_jmp_T0();
3809 }
3810 gen_eob(s);
3811 break;
3812 case 6: /* push Ev */
3813 gen_push_T0(s);
3814 break;
3815 default:
3816 goto illegal_op;
3817 }
3818 break;
3819
3820 case 0x84: /* test Ev, Gv */
3821 case 0x85:
3822 if ((b & 1) == 0)
3823 ot = OT_BYTE;
3824 else
3825 ot = dflag + OT_WORD;
3826
3827 modrm = ldub_code(s->pc++);
3828 mod = (modrm >> 6) & 3;
3829 rm = (modrm & 7) | REX_B(s);
3830 reg = ((modrm >> 3) & 7) | rex_r;
3831
3832 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3833 gen_op_mov_TN_reg(ot, 1, reg);
3834 gen_op_testl_T0_T1_cc();
3835 s->cc_op = CC_OP_LOGICB + ot;
3836 break;
3837
3838 case 0xa8: /* test eAX, Iv */
3839 case 0xa9:
3840 if ((b & 1) == 0)
3841 ot = OT_BYTE;
3842 else
3843 ot = dflag + OT_WORD;
3844 val = insn_get(s, ot);
3845
3846 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3847 gen_op_movl_T1_im(val);
3848 gen_op_testl_T0_T1_cc();
3849 s->cc_op = CC_OP_LOGICB + ot;
3850 break;
3851
3852 case 0x98: /* CWDE/CBW */
3853 #ifdef TARGET_X86_64
3854 if (dflag == 2) {
3855 gen_op_movslq_RAX_EAX();
3856 } else
3857 #endif
3858 if (dflag == 1)
3859 gen_op_movswl_EAX_AX();
3860 else
3861 gen_op_movsbw_AX_AL();
3862 break;
3863 case 0x99: /* CDQ/CWD */
3864 #ifdef TARGET_X86_64
3865 if (dflag == 2) {
3866 gen_op_movsqo_RDX_RAX();
3867 } else
3868 #endif
3869 if (dflag == 1)
3870 gen_op_movslq_EDX_EAX();
3871 else
3872 gen_op_movswl_DX_AX();
3873 break;
3874 case 0x1af: /* imul Gv, Ev */
3875 case 0x69: /* imul Gv, Ev, I */
3876 case 0x6b:
3877 ot = dflag + OT_WORD;
3878 modrm = ldub_code(s->pc++);
3879 reg = ((modrm >> 3) & 7) | rex_r;
3880 if (b == 0x69)
3881 s->rip_offset = insn_const_size(ot);
3882 else if (b == 0x6b)
3883 s->rip_offset = 1;
3884 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3885 if (b == 0x69) {
3886 val = insn_get(s, ot);
3887 gen_op_movl_T1_im(val);
3888 } else if (b == 0x6b) {
3889 val = (int8_t)insn_get(s, OT_BYTE);
3890 gen_op_movl_T1_im(val);
3891 } else {
3892 gen_op_mov_TN_reg(ot, 1, reg);
3893 }
3894
3895 #ifdef TARGET_X86_64
3896 if (ot == OT_QUAD) {
3897 gen_op_imulq_T0_T1();
3898 } else
3899 #endif
3900 if (ot == OT_LONG) {
3901 gen_op_imull_T0_T1();
3902 } else {
3903 gen_op_imulw_T0_T1();
3904 }
3905 gen_op_mov_reg_T0(ot, reg);
3906 s->cc_op = CC_OP_MULB + ot;
3907 break;
3908 case 0x1c0:
3909 case 0x1c1: /* xadd Ev, Gv */
3910 if ((b & 1) == 0)
3911 ot = OT_BYTE;
3912 else
3913 ot = dflag + OT_WORD;
3914 modrm = ldub_code(s->pc++);
3915 reg = ((modrm >> 3) & 7) | rex_r;
3916 mod = (modrm >> 6) & 3;
3917 if (mod == 3) {
3918 rm = (modrm & 7) | REX_B(s);
3919 gen_op_mov_TN_reg(ot, 0, reg);
3920 gen_op_mov_TN_reg(ot, 1, rm);
3921 gen_op_addl_T0_T1();
3922 gen_op_mov_reg_T1(ot, reg);
3923 gen_op_mov_reg_T0(ot, rm);
3924 } else {
3925 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3926 gen_op_mov_TN_reg(ot, 0, reg);
3927 gen_op_ld_T1_A0(ot + s->mem_index);
3928 gen_op_addl_T0_T1();
3929 gen_op_st_T0_A0(ot + s->mem_index);
3930 gen_op_mov_reg_T1(ot, reg);
3931 }
3932 gen_op_update2_cc();
3933 s->cc_op = CC_OP_ADDB + ot;
3934 break;
3935 case 0x1b0:
3936 case 0x1b1: /* cmpxchg Ev, Gv */
3937 if ((b & 1) == 0)
3938 ot = OT_BYTE;
3939 else
3940 ot = dflag + OT_WORD;
3941 modrm = ldub_code(s->pc++);
3942 reg = ((modrm >> 3) & 7) | rex_r;
3943 mod = (modrm >> 6) & 3;
3944 gen_op_mov_TN_reg(ot, 1, reg);
3945 if (mod == 3) {
3946 rm = (modrm & 7) | REX_B(s);
3947 gen_op_mov_TN_reg(ot, 0, rm);
3948 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3949 gen_op_mov_reg_T0(ot, rm);
3950 } else {
3951 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3952 gen_op_ld_T0_A0(ot + s->mem_index);
3953 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3954 }
3955 s->cc_op = CC_OP_SUBB + ot;
3956 break;
3957 case 0x1c7: /* cmpxchg8b */
3958 modrm = ldub_code(s->pc++);
3959 mod = (modrm >> 6) & 3;
3960 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3961 goto illegal_op;
3962 gen_jmp_im(pc_start - s->cs_base);
3963 if (s->cc_op != CC_OP_DYNAMIC)
3964 gen_op_set_cc_op(s->cc_op);
3965 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3966 gen_op_cmpxchg8b();
3967 s->cc_op = CC_OP_EFLAGS;
3968 break;
3969
3970 /**************************/
3971 /* push/pop */
3972 case 0x50 ... 0x57: /* push */
3973 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3974 gen_push_T0(s);
3975 break;
3976 case 0x58 ... 0x5f: /* pop */
3977 if (CODE64(s)) {
3978 ot = dflag ? OT_QUAD : OT_WORD;
3979 } else {
3980 ot = dflag + OT_WORD;
3981 }
3982 gen_pop_T0(s);
3983 /* NOTE: order is important for pop %sp */
3984 gen_pop_update(s);
3985 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3986 break;
3987 case 0x60: /* pusha */
3988 if (CODE64(s))
3989 goto illegal_op;
3990 gen_pusha(s);
3991 break;
3992 case 0x61: /* popa */
3993 if (CODE64(s))
3994 goto illegal_op;
3995 gen_popa(s);
3996 break;
3997 case 0x68: /* push Iv */
3998 case 0x6a:
3999 if (CODE64(s)) {
4000 ot = dflag ? OT_QUAD : OT_WORD;
4001 } else {
4002 ot = dflag + OT_WORD;
4003 }
4004 if (b == 0x68)
4005 val = insn_get(s, ot);
4006 else
4007 val = (int8_t)insn_get(s, OT_BYTE);
4008 gen_op_movl_T0_im(val);
4009 gen_push_T0(s);
4010 break;
4011 case 0x8f: /* pop Ev */
4012 if (CODE64(s)) {
4013 ot = dflag ? OT_QUAD : OT_WORD;
4014 } else {
4015 ot = dflag + OT_WORD;
4016 }
4017 modrm = ldub_code(s->pc++);
4018 mod = (modrm >> 6) & 3;
4019 gen_pop_T0(s);
4020 if (mod == 3) {
4021 /* NOTE: order is important for pop %sp */
4022 gen_pop_update(s);
4023 rm = (modrm & 7) | REX_B(s);
4024 gen_op_mov_reg_T0(ot, rm);
4025 } else {
4026 /* NOTE: order is important too for MMU exceptions */
4027 s->popl_esp_hack = 1 << ot;
4028 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4029 s->popl_esp_hack = 0;
4030 gen_pop_update(s);
4031 }
4032 break;
4033 case 0xc8: /* enter */
4034 {
4035 int level;
4036 val = lduw_code(s->pc);
4037 s->pc += 2;
4038 level = ldub_code(s->pc++);
4039 gen_enter(s, val, level);
4040 }
4041 break;
4042 case 0xc9: /* leave */
4043 /* XXX: exception not precise (ESP is updated before potential exception) */
4044 if (CODE64(s)) {
4045 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4046 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4047 } else if (s->ss32) {
4048 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4049 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4050 } else {
4051 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4052 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4053 }
4054 gen_pop_T0(s);
4055 if (CODE64(s)) {
4056 ot = dflag ? OT_QUAD : OT_WORD;
4057 } else {
4058 ot = dflag + OT_WORD;
4059 }
4060 gen_op_mov_reg_T0(ot, R_EBP);
4061 gen_pop_update(s);
4062 break;
4063 case 0x06: /* push es */
4064 case 0x0e: /* push cs */
4065 case 0x16: /* push ss */
4066 case 0x1e: /* push ds */
4067 if (CODE64(s))
4068 goto illegal_op;
4069 gen_op_movl_T0_seg(b >> 3);
4070 gen_push_T0(s);
4071 break;
4072 case 0x1a0: /* push fs */
4073 case 0x1a8: /* push gs */
4074 gen_op_movl_T0_seg((b >> 3) & 7);
4075 gen_push_T0(s);
4076 break;
4077 case 0x07: /* pop es */
4078 case 0x17: /* pop ss */
4079 case 0x1f: /* pop ds */
4080 if (CODE64(s))
4081 goto illegal_op;
4082 reg = b >> 3;
4083 gen_pop_T0(s);
4084 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4085 gen_pop_update(s);
4086 if (reg == R_SS) {
4087 /* if reg == SS, inhibit interrupts/trace. */
4088 /* If several instructions disable interrupts, only the
4089 _first_ does it */
4090 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4091 gen_op_set_inhibit_irq();
4092 s->tf = 0;
4093 }
4094 if (s->is_jmp) {
4095 gen_jmp_im(s->pc - s->cs_base);
4096 gen_eob(s);
4097 }
4098 break;
4099 case 0x1a1: /* pop fs */
4100 case 0x1a9: /* pop gs */
4101 gen_pop_T0(s);
4102 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4103 gen_pop_update(s);
4104 if (s->is_jmp) {
4105 gen_jmp_im(s->pc - s->cs_base);
4106 gen_eob(s);
4107 }
4108 break;
4109
4110 /**************************/
4111 /* mov */
4112 case 0x88:
4113 case 0x89: /* mov Gv, Ev */
4114 if ((b & 1) == 0)
4115 ot = OT_BYTE;
4116 else
4117 ot = dflag + OT_WORD;
4118 modrm = ldub_code(s->pc++);
4119 reg = ((modrm >> 3) & 7) | rex_r;
4120
4121 /* generate a generic store */
4122 gen_ldst_modrm(s, modrm, ot, reg, 1);
4123 break;
4124 case 0xc6:
4125 case 0xc7: /* mov Ev, Iv */
4126 if ((b & 1) == 0)
4127 ot = OT_BYTE;
4128 else
4129 ot = dflag + OT_WORD;
4130 modrm = ldub_code(s->pc++);
4131 mod = (modrm >> 6) & 3;
4132 if (mod != 3) {
4133 s->rip_offset = insn_const_size(ot);
4134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4135 }
4136 val = insn_get(s, ot);
4137 gen_op_movl_T0_im(val);
4138 if (mod != 3)
4139 gen_op_st_T0_A0(ot + s->mem_index);
4140 else
4141 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4142 break;
4143 case 0x8a:
4144 case 0x8b: /* mov Ev, Gv */
4145 if ((b & 1) == 0)
4146 ot = OT_BYTE;
4147 else
4148 ot = OT_WORD + dflag;
4149 modrm = ldub_code(s->pc++);
4150 reg = ((modrm >> 3) & 7) | rex_r;
4151
4152 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4153 gen_op_mov_reg_T0(ot, reg);
4154 break;
4155 case 0x8e: /* mov seg, Gv */
4156 modrm = ldub_code(s->pc++);
4157 reg = (modrm >> 3) & 7;
4158 if (reg >= 6 || reg == R_CS)
4159 goto illegal_op;
4160 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4161 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4162 if (reg == R_SS) {
4163 /* if reg == SS, inhibit interrupts/trace */
4164 /* If several instructions disable interrupts, only the
4165 _first_ does it */
4166 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4167 gen_op_set_inhibit_irq();
4168 s->tf = 0;
4169 }
4170 if (s->is_jmp) {
4171 gen_jmp_im(s->pc - s->cs_base);
4172 gen_eob(s);
4173 }
4174 break;
4175 case 0x8c: /* mov Gv, seg */
4176 modrm = ldub_code(s->pc++);
4177 reg = (modrm >> 3) & 7;
4178 mod = (modrm >> 6) & 3;
4179 if (reg >= 6)
4180 goto illegal_op;
4181 gen_op_movl_T0_seg(reg);
4182 if (mod == 3)
4183 ot = OT_WORD + dflag;
4184 else
4185 ot = OT_WORD;
4186 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4187 break;
4188
4189 case 0x1b6: /* movzbS Gv, Eb */
4190 case 0x1b7: /* movzwS Gv, Eb */
4191 case 0x1be: /* movsbS Gv, Eb */
4192 case 0x1bf: /* movswS Gv, Eb */
4193 {
4194 int d_ot;
4195 /* d_ot is the size of destination */
4196 d_ot = dflag + OT_WORD;
4197 /* ot is the size of source */
4198 ot = (b & 1) + OT_BYTE;
4199 modrm = ldub_code(s->pc++);
4200 reg = ((modrm >> 3) & 7) | rex_r;
4201 mod = (modrm >> 6) & 3;
4202 rm = (modrm & 7) | REX_B(s);
4203
4204 if (mod == 3) {
4205 gen_op_mov_TN_reg(ot, 0, rm);
4206 switch(ot | (b & 8)) {
4207 case OT_BYTE:
4208 gen_op_movzbl_T0_T0();
4209 break;
4210 case OT_BYTE | 8:
4211 gen_op_movsbl_T0_T0();
4212 break;
4213 case OT_WORD:
4214 gen_op_movzwl_T0_T0();
4215 break;
4216 default:
4217 case OT_WORD | 8:
4218 gen_op_movswl_T0_T0();
4219 break;
4220 }
4221 gen_op_mov_reg_T0(d_ot, reg);
4222 } else {
4223 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4224 if (b & 8) {
4225 gen_op_lds_T0_A0(ot + s->mem_index);
4226 } else {
4227 gen_op_ldu_T0_A0(ot + s->mem_index);
4228 }
4229 gen_op_mov_reg_T0(d_ot, reg);
4230 }
4231 }
4232 break;
4233
4234 case 0x8d: /* lea */
4235 ot = dflag + OT_WORD;
4236 modrm = ldub_code(s->pc++);
4237 mod = (modrm >> 6) & 3;
4238 if (mod == 3)
4239 goto illegal_op;
4240 reg = ((modrm >> 3) & 7) | rex_r;
4241 /* we must ensure that no segment base is added: lea yields the plain effective address */
4242 s->override = -1;
4243 val = s->addseg;
4244 s->addseg = 0;
4245 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4246 s->addseg = val;
4247 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4248 break;
4249
4250 case 0xa0: /* mov EAX, Ov */
4251 case 0xa1:
4252 case 0xa2: /* mov Ov, EAX */
4253 case 0xa3:
4254 {
4255 target_ulong offset_addr;
4256
4257 if ((b & 1) == 0)
4258 ot = OT_BYTE;
4259 else
4260 ot = dflag + OT_WORD;
4261 #ifdef TARGET_X86_64
4262 if (s->aflag == 2) {
4263 offset_addr = ldq_code(s->pc);
4264 s->pc += 8;
4265 gen_op_movq_A0_im(offset_addr);
4266 } else
4267 #endif
4268 {
4269 if (s->aflag) {
4270 offset_addr = insn_get(s, OT_LONG);
4271 } else {
4272 offset_addr = insn_get(s, OT_WORD);
4273 }
4274 gen_op_movl_A0_im(offset_addr);
4275 }
4276 gen_add_A0_ds_seg(s);
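/* opcode bit 1 gives the direction: a0/a1 load EAX from memory, a2/a3 store it */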
4277 if ((b & 2) == 0) {
4278 gen_op_ld_T0_A0(ot + s->mem_index);
4279 gen_op_mov_reg_T0(ot, R_EAX);
4280 } else {
4281 gen_op_mov_TN_reg(ot, 0, R_EAX);
4282 gen_op_st_T0_A0(ot + s->mem_index);
4283 }
4284 }
4285 break;
4286 case 0xd7: /* xlat */
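/* xlat replaces AL with the byte at DS:[rEBX + unsigned AL] */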
4287 #ifdef TARGET_X86_64
4288 if (s->aflag == 2) {
4289 gen_op_movq_A0_reg(R_EBX);
4290 gen_op_addq_A0_AL();
4291 } else
4292 #endif
4293 {
4294 gen_op_movl_A0_reg(R_EBX);
4295 gen_op_addl_A0_AL();
4296 if (s->aflag == 0)
4297 gen_op_andl_A0_ffff();
4298 }
4299 gen_add_A0_ds_seg(s);
4300 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4301 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4302 break;
4303 case 0xb0 ... 0xb7: /* mov R, Ib */
4304 val = insn_get(s, OT_BYTE);
4305 gen_op_movl_T0_im(val);
4306 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4307 break;
4308 case 0xb8 ... 0xbf: /* mov R, Iv */
4309 #ifdef TARGET_X86_64
4310 if (dflag == 2) {
4311 uint64_t tmp;
4312 /* 64 bit case */
4313 tmp = ldq_code(s->pc);
4314 s->pc += 8;
4315 reg = (b & 7) | REX_B(s);
4316 gen_movtl_T0_im(tmp);
4317 gen_op_mov_reg_T0(OT_QUAD, reg);
4318 } else
4319 #endif
4320 {
4321 ot = dflag ? OT_LONG : OT_WORD;
4322 val = insn_get(s, ot);
4323 reg = (b & 7) | REX_B(s);
4324 gen_op_movl_T0_im(val);
4325 gen_op_mov_reg_T0(ot, reg);
4326 }
4327 break;
4328
4329 case 0x91 ... 0x97: /* xchg R, EAX */
4330 ot = dflag + OT_WORD;
4331 reg = (b & 7) | REX_B(s);
4332 rm = R_EAX;
4333 goto do_xchg_reg;
4334 case 0x86:
4335 case 0x87: /* xchg Ev, Gv */
4336 if ((b & 1) == 0)
4337 ot = OT_BYTE;
4338 else
4339 ot = dflag + OT_WORD;
4340 modrm = ldub_code(s->pc++);
4341 reg = ((modrm >> 3) & 7) | rex_r;
4342 mod = (modrm >> 6) & 3;
4343 if (mod == 3) {
4344 rm = (modrm & 7) | REX_B(s);
4345 do_xchg_reg:
4346 gen_op_mov_TN_reg(ot, 0, reg);
4347 gen_op_mov_TN_reg(ot, 1, rm);
4348 gen_op_mov_reg_T0(ot, rm);
4349 gen_op_mov_reg_T1(ot, reg);
4350 } else {
4351 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4352 gen_op_mov_TN_reg(ot, 0, reg);
4353 /* for xchg, lock is implicit */
4354 if (!(prefixes & PREFIX_LOCK))
4355 gen_op_lock();
4356 gen_op_ld_T1_A0(ot + s->mem_index);
4357 gen_op_st_T0_A0(ot + s->mem_index);
4358 if (!(prefixes & PREFIX_LOCK))
4359 gen_op_unlock();
4360 gen_op_mov_reg_T1(ot, reg);
4361 }
4362 break;
4363 case 0xc4: /* les Gv */
4364 if (CODE64(s))
4365 goto illegal_op;
4366 op = R_ES;
4367 goto do_lxx;
4368 case 0xc5: /* lds Gv */
4369 if (CODE64(s))
4370 goto illegal_op;
4371 op = R_DS;
4372 goto do_lxx;
4373 case 0x1b2: /* lss Gv */
4374 op = R_SS;
4375 goto do_lxx;
4376 case 0x1b4: /* lfs Gv */
4377 op = R_FS;
4378 goto do_lxx;
4379 case 0x1b5: /* lgs Gv */
4380 op = R_GS;
4381 do_lxx:
4382 ot = dflag ? OT_LONG : OT_WORD;
4383 modrm = ldub_code(s->pc++);
4384 reg = ((modrm >> 3) & 7) | rex_r;
4385 mod = (modrm >> 6) & 3;
4386 if (mod == 3)
4387 goto illegal_op;
4388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4389 gen_op_ld_T1_A0(ot + s->mem_index);
4390 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4391 /* load the segment first to handle exceptions properly */
4392 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4393 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4394 /* then put the data */
4395 gen_op_mov_reg_T1(ot, reg);
4396 if (s->is_jmp) {
4397 gen_jmp_im(s->pc - s->cs_base);
4398 gen_eob(s);
4399 }
4400 break;
4401
4402 /************************/
4403 /* shifts */
4404 case 0xc0:
4405 case 0xc1:
4406 /* shift Ev,Ib */
4407 shift = 2;
4408 grp2:
4409 {
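/* common grp2 body: shift tells where the count comes from:
   0 = CL, 1 = the constant 1, 2 = an imm8 still to be fetched */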
4410 if ((b & 1) == 0)
4411 ot = OT_BYTE;
4412 else
4413 ot = dflag + OT_WORD;
4414
4415 modrm = ldub_code(s->pc++);
4416 mod = (modrm >> 6) & 3;
4417 op = (modrm >> 3) & 7;
4418
4419 if (mod != 3) {
4420 if (shift == 2) {
4421 s->rip_offset = 1;
4422 }
4423 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4424 opreg = OR_TMP0;
4425 } else {
4426 opreg = (modrm & 7) | REX_B(s);
4427 }
4428
4429 /* shift == 0: variable count from CL; otherwise a constant count */
4430 if (shift == 0) {
4431 gen_shift(s, op, ot, opreg, OR_ECX);
4432 } else {
4433 if (shift == 2) {
4434 shift = ldub_code(s->pc++);
4435 }
4436 gen_shifti(s, op, ot, opreg, shift);
4437 }
4438 }
4439 break;
4440 case 0xd0:
4441 case 0xd1:
4442 /* shift Ev,1 */
4443 shift = 1;
4444 goto grp2;
4445 case 0xd2:
4446 case 0xd3:
4447 /* shift Ev,cl */
4448 shift = 0;
4449 goto grp2;
4450
4451 case 0x1a4: /* shld imm */
4452 op = 0;
4453 shift = 1;
4454 goto do_shiftd;
4455 case 0x1a5: /* shld cl */
4456 op = 0;
4457 shift = 0;
4458 goto do_shiftd;
4459 case 0x1ac: /* shrd imm */
4460 op = 1;
4461 shift = 1;
4462 goto do_shiftd;
4463 case 0x1ad: /* shrd cl */
4464 op = 1;
4465 shift = 0;
4466 do_shiftd:
4467 ot = dflag + OT_WORD;
4468 modrm = ldub_code(s->pc++);
4469 mod = (modrm >> 6) & 3;
4470 rm = (modrm & 7) | REX_B(s);
4471 reg = ((modrm >> 3) & 7) | rex_r;
4472
4473 if (mod != 3) {
4474 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4475 gen_op_ld_T0_A0(ot + s->mem_index);
4476 } else {
4477 gen_op_mov_TN_reg(ot, 0, rm);
4478 }
4479 gen_op_mov_TN_reg(ot, 1, reg);
4480
4481 if (shift) {
4482 val = ldub_code(s->pc++);
4483 if (ot == OT_QUAD)
4484 val &= 0x3f;
4485 else
4486 val &= 0x1f;
4487 if (val) {
4488 if (mod == 3)
4489 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4490 else
4491 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4492 if (op == 0 && ot != OT_WORD)
4493 s->cc_op = CC_OP_SHLB + ot;
4494 else
4495 s->cc_op = CC_OP_SARB + ot;
4496 }
4497 } else {
4498 if (s->cc_op != CC_OP_DYNAMIC)
4499 gen_op_set_cc_op(s->cc_op);
4500 if (mod == 3)
4501 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4502 else
4503 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4504 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4505 }
4506 if (mod == 3) {
4507 gen_op_mov_reg_T0(ot, rm);
4508 }
4509 break;
4510
4511 /************************/
4512 /* floats */
4513 case 0xd8 ... 0xdf:
4514 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4515 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4516 /* XXX: what to do if illegal op ? */
4517 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4518 break;
4519 }
4520 modrm = ldub_code(s->pc++);
4521 mod = (modrm >> 6) & 3;
4522 rm = modrm & 7;
4523 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
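/* combine the low opcode bits (d8..df) with modrm.reg into a single
   6-bit index identifying the x87 operation */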
4524 if (mod != 3) {
4525 /* memory op */
4526 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4527 switch(op) {
4528 case 0x00 ... 0x07: /* fxxxs */
4529 case 0x10 ... 0x17: /* fixxxl */
4530 case 0x20 ... 0x27: /* fxxxl */
4531 case 0x30 ... 0x37: /* fixxx */
4532 {
4533 int op1;
4534 op1 = op & 7;
4535
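/* op >> 4 encodes the memory operand type:
   0 = float32, 1 = int32, 2 = float64, 3 = int16 */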
4536 switch(op >> 4) {
4537 case 0:
4538 gen_op_ld_T0_A0(OT_LONG);
4539 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4540 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
4541 break;
4542 case 1:
4543 gen_op_ld_T0_A0(OT_LONG);
4544 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4545 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4546 break;
4547 case 2:
4548 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4549 (s->mem_index >> 2) - 1);
4550 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
4551 break;
4552 case 3:
4553 default:
4554 gen_op_ld_T0_A0(OT_WORD);
4555 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4556 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4557 break;
4558 }
4559
4560 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4561 if (op1 == 3) {
4562 /* fcomp needs pop */
4563 tcg_gen_helper_0_0(helper_fpop);
4564 }
4565 }
4566 break;
4567 case 0x08: /* flds */
4568 case 0x0a: /* fsts */
4569 case 0x0b: /* fstps */
4570 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4571 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4572 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4573 switch(op & 7) {
4574 case 0:
4575 switch(op >> 4) {
4576 case 0:
4577 gen_op_ld_T0_A0(OT_LONG);
4578 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4579 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
4580 break;
4581 case 1:
4582 gen_op_ld_T0_A0(OT_LONG);
4583 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4584 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4585 break;
4586 case 2:
4587 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4588 (s->mem_index >> 2) - 1);
4589 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
4590 break;
4591 case 3:
4592 default:
4593 gen_op_ld_T0_A0(OT_WORD);
4594 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4595 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4596 break;
4597 }
4598 break;
4599 case 1:
4600 /* XXX: the fisttp insns require the SSE3 CPUID bit, which is not tested here */
4601 switch(op >> 4) {
4602 case 1:
4603 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
4604 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4605 gen_op_st_T0_A0(OT_LONG);
4606 break;
4607 case 2:
4608 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
4609 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4610 (s->mem_index >> 2) - 1);
4611 break;
4612 case 3:
4613 default:
4614 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
4615 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4616 gen_op_st_T0_A0(OT_WORD);
4617 break;
4618 }
4619 tcg_gen_helper_0_0(helper_fpop);
4620 break;
4621 default:
4622 switch(op >> 4) {
4623 case 0:
4624 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
4625 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4626 gen_op_st_T0_A0(OT_LONG);
4627 break;
4628 case 1:
4629 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
4630 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4631 gen_op_st_T0_A0(OT_LONG);
4632 break;
4633 case 2:
4634 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
4635 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4636 (s->mem_index >> 2) - 1);
4637 break;
4638 case 3:
4639 default:
4640 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
4641 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4642 gen_op_st_T0_A0(OT_WORD);
4643 break;
4644 }
4645 if ((op & 7) == 3)
4646 tcg_gen_helper_0_0(helper_fpop);
4647 break;
4648 }
4649 break;
4650 case 0x0c: /* fldenv mem */
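/* the fp env/state helpers can raise exceptions: sync the cc state
   and point EIP at this insn before calling them */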
4651 if (s->cc_op != CC_OP_DYNAMIC)
4652 gen_op_set_cc_op(s->cc_op);
4653 gen_jmp_im(pc_start - s->cs_base);
4654 tcg_gen_helper_0_2(helper_fldenv,
4655 cpu_A0, tcg_const_i32(s->dflag));
4656 break;
4657 case 0x0d: /* fldcw mem */
4658 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4659 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4660 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
4661 break;
4662 case 0x0e: /* fnstenv mem */
4663 if (s->cc_op != CC_OP_DYNAMIC)
4664 gen_op_set_cc_op(s->cc_op);
4665 gen_jmp_im(pc_start - s->cs_base);
4666 tcg_gen_helper_0_2(helper_fstenv,
4667 cpu_A0, tcg_const_i32(s->dflag));
4668 break;
4669 case 0x0f: /* fnstcw mem */
4670 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
4671 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4672 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4673 break;
4674 case 0x1d: /* fldt mem */
4675 if (s->cc_op != CC_OP_DYNAMIC)
4676 gen_op_set_cc_op(s->cc_op);
4677 gen_jmp_im(pc_start - s->cs_base);
4678 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4679 break;
4680 case 0x1f: /* fstpt mem */
4681 if (s->cc_op != CC_OP_DYNAMIC)
4682 gen_op_set_cc_op(s->cc_op);
4683 gen_jmp_im(pc_start - s->cs_base);
4684 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4685 tcg_gen_helper_0_0(helper_fpop);
4686 break;
4687 case 0x2c: /* frstor mem */
4688 if (s->cc_op != CC_OP_DYNAMIC)
4689 gen_op_set_cc_op(s->cc_op);
4690 gen_jmp_im(pc_start - s->cs_base);
4691 tcg_gen_helper_0_2(helper_frstor,
4692 cpu_A0, tcg_const_i32(s->dflag));
4693 break;
4694 case 0x2e: /* fnsave mem */
4695 if (s->cc_op != CC_OP_DYNAMIC)
4696 gen_op_set_cc_op(s->cc_op);
4697 gen_jmp_im(pc_start - s->cs_base);
4698 tcg_gen_helper_0_2(helper_fsave,
4699 cpu_A0, tcg_const_i32(s->dflag));
4700 break;
4701 case 0x2f: /* fnstsw mem */
4702 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4703 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4704 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4705 break;
4706 case 0x3c: /* fbld */
4707 if (s->cc_op != CC_OP_DYNAMIC)
4708 gen_op_set_cc_op(s->cc_op);
4709 gen_jmp_im(pc_start - s->cs_base);
4710 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4711 break;
4712 case 0x3e: /* fbstp */
4713 if (s->cc_op != CC_OP_DYNAMIC)
4714 gen_op_set_cc_op(s->cc_op);
4715 gen_jmp_im(pc_start - s->cs_base);
4716 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4717 tcg_gen_helper_0_0(helper_fpop);
4718 break;
4719 case 0x3d: /* fildll */
4720 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4721 (s->mem_index >> 2) - 1);
4722 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
4723 break;
4724 case 0x3f: /* fistpll */
4725 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
4726 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4727 (s->mem_index >> 2) - 1);
4728 tcg_gen_helper_0_0(helper_fpop);
4729 break;
4730 default:
4731 goto illegal_op;
4732 }
4733 } else {
4734 /* register float ops */
4735 opreg = rm;
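/* for the register forms, modrm.rm gives the ST(i) index */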
4736
4737 switch(op) {
4738 case 0x08: /* fld sti */
4739 tcg_gen_helper_0_0(helper_fpush);
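/* fpush has already moved the stack top, so the source ST(i)
   now sits at index (opreg + 1) & 7 */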
4740 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4741 break;
4742 case 0x09: /* fxchg sti */
4743 case 0x29: /* fxchg4 sti, undocumented op */
4744 case 0x39: /* fxchg7 sti, undocumented op */
4745 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4746 break;
4747 case 0x0a: /* grp d9/2 */
4748 switch(rm) {
4749 case 0: /* fnop */
4750 /* check exceptions (FreeBSD FPU probe) */
4751 if (s->cc_op != CC_OP_DYNAMIC)
4752 gen_op_set_cc_op(s->cc_op);
4753 gen_jmp_im(pc_start - s->cs_base);
4754 tcg_gen_helper_0_0(helper_fwait);
4755 break;
4756 default:
4757 goto illegal_op;
4758 }
4759 break;
4760 case 0x0c: /* grp d9/4 */
4761 switch(rm) {
4762 case 0: /* fchs */
4763 tcg_gen_helper_0_0(helper_fchs_ST0);
4764 break;
4765 case 1: /* fabs */
4766 tcg_gen_helper_0_0(helper_fabs_ST0);
4767 break;
4768 case 4: /* ftst */
4769 tcg_gen_helper_0_0(helper_fldz_FT0);
4770 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4771 break;
4772 case 5: /* fxam */
4773 tcg_gen_helper_0_0(helper_fxam_ST0);
4774 break;
4775 default:
4776 goto illegal_op;
4777 }
4778 break;
4779 case 0x0d: /* grp d9/5 */
4780 {
4781 switch(rm) {
4782 case 0:
4783 tcg_gen_helper_0_0(helper_fpush);
4784 tcg_gen_helper_0_0(helper_fld1_ST0);
4785 break;
4786 case 1:
4787 tcg_gen_helper_0_0(helper_fpush);
4788 tcg_gen_helper_0_0(helper_fldl2t_ST0);
4789 break;
4790 case 2:
4791 tcg_gen_helper_0_0(helper_fpush);
4792 tcg_gen_helper_0_0(helper_fldl2e_ST0);
4793 break;
4794 case 3:
4795 tcg_gen_helper_0_0(helper_fpush);
4796 tcg_gen_helper_0_0(helper_fldpi_ST0);
4797 break;
4798 case 4:
4799 tcg_gen_helper_0_0(helper_fpush);
4800 tcg_gen_helper_0_0(helper_fldlg2_ST0);
4801 break;
4802 case 5:
4803 tcg_gen_helper_0_0(helper_fpush);
4804 tcg_gen_helper_0_0(helper_fldln2_ST0);
4805 break;
4806 case 6:
4807 tcg_gen_helper_0_0(helper_fpush);
4808 tcg_gen_helper_0_0(helper_fldz_ST0);
4809 break;
4810 default:
4811 goto illegal_op;
4812 }
4813 }
4814 break;
4815 case 0x0e: /* grp d9/6 */
4816 switch(rm) {
4817 case 0: /* f2xm1 */
4818 tcg_gen_helper_0_0(helper_f2xm1);
4819 break;
4820 case 1: /* fyl2x */
4821 tcg_gen_helper_0_0(helper_fyl2x);
4822 break;
4823 case 2: /* fptan */
4824 tcg_gen_helper_0_0(helper_fptan);
4825 break;
4826 case 3: /* fpatan */
4827 tcg_gen_helper_0_0(helper_fpatan);
4828 break;
4829 case 4: /* fxtract */
4830 tcg_gen_helper_0_0(helper_fxtract);
4831 break;
4832 case 5: /* fprem1 */
4833 tcg_gen_helper_0_0(helper_fprem1);
4834 break;
4835 case 6: /* fdecstp */
4836 tcg_gen_helper_0_0(helper_fdecstp);
4837 break;
4838 default:
4839 case 7: /* fincstp */
4840 tcg_gen_helper_0_0(helper_fincstp);
4841 break;
4842 }
4843 break;
4844 case 0x0f: /* grp d9/7 */
4845 switch(rm) {
4846 case 0: /* fprem */
4847 tcg_gen_helper_0_0(helper_fprem);
4848 break;
4849 case 1: /* fyl2xp1 */
4850 tcg_gen_helper_0_0(helper_fyl2xp1);
4851 break;
4852 case 2: /* fsqrt */
4853 tcg_gen_helper_0_0(helper_fsqrt);
4854 break;
4855 case 3: /* fsincos */
4856 tcg_gen_helper_0_0(helper_fsincos);
4857 break;
4858 case 5: /* fscale */
4859 tcg_gen_helper_0_0(helper_fscale);
4860 break;
4861 case 4: /* frndint */
4862 tcg_gen_helper_0_0(helper_frndint);
4863 break;
4864 case 6: /* fsin */
4865 tcg_gen_helper_0_0(helper_fsin);
4866 break;
4867 default:
4868 case 7: /* fcos */
4869 tcg_gen_helper_0_0(helper_fcos);
4870 break;
4871 }
4872 break;
4873 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4874 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4875 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4876 {
4877 int op1;
4878
4879 op1 = op & 7;
4880 if (op >= 0x20) {
4881 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
4882 if (op >= 0x30)
4883 tcg_gen_helper_0_0(helper_fpop);
4884 } else {
4885 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4886 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4887 }
4888 }
4889 break;
4890 case 0x02: /* fcom */
4891 case 0x22: /* fcom2, undocumented op */
4892 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4893 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4894 break;
4895 case 0x03: /* fcomp */
4896 case 0x23: /* fcomp3, undocumented op */
4897 case 0x32: /* fcomp5, undocumented op */
4898 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4899 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4900 tcg_gen_helper_0_0(helper_fpop);
4901 break;
4902 case 0x15: /* da/5 */
4903 switch(rm) {
4904 case 1: /* fucompp */
4905 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4906 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4907 tcg_gen_helper_0_0(helper_fpop);
4908 tcg_gen_helper_0_0(helper_fpop);
4909 break;
4910 default:
4911 goto illegal_op;
4912 }
4913 break;
4914 case 0x1c:
4915 switch(rm) {
4916 case 0: /* feni (287 only, just do nop here) */
4917 break;
4918 case 1: /* fdisi (287 only, just do nop here) */
4919 break;
4920 case 2: /* fclex */
4921 tcg_gen_helper_0_0(helper_fclex);
4922 break;
4923 case 3: /* fninit */
4924 tcg_gen_helper_0_0(helper_fninit);
4925 break;
4926 case 4: /* fsetpm (287 only, just do nop here) */
4927 break;
4928 default:
4929 goto illegal_op;
4930 }
4931 break;
4932 case 0x1d: /* fucomi */
4933 if (s->cc_op != CC_OP_DYNAMIC)
4934 gen_op_set_cc_op(s->cc_op);
4935 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4936 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
4937 gen_op_fcomi_dummy();
4938 s->cc_op = CC_OP_EFLAGS;
4939 break;
4940 case 0x1e: /* fcomi */
4941 if (s->cc_op != CC_OP_DYNAMIC)
4942 gen_op_set_cc_op(s->cc_op);
4943 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4944 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
4945 gen_op_fcomi_dummy();
4946 s->cc_op = CC_OP_EFLAGS;
4947 break;
4948 case 0x28: /* ffree sti */
4949 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4950 break;
4951 case 0x2a: /* fst sti */
4952 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4953 break;
4954 case 0x2b: /* fstp sti */
4955 case 0x0b: /* fstp1 sti, undocumented op */
4956 case 0x3a: /* fstp8 sti, undocumented op */
4957 case 0x3b: /* fstp9 sti, undocumented op */
4958 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4959 tcg_gen_helper_0_0(helper_fpop);
4960 break;
4961 case 0x2c: /* fucom st(i) */
4962 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4963 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4964 break;
4965 case 0x2d: /* fucomp st(i) */
4966 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4967 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4968 tcg_gen_helper_0_0(helper_fpop);
4969 break;
4970 case 0x33: /* de/3 */
4971 switch(rm) {
4972 case 1: /* fcompp */
4973 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4974 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4975 tcg_gen_helper_0_0(helper_fpop);
4976 tcg_gen_helper_0_0(helper_fpop);
4977 break;
4978 default:
4979 goto illegal_op;
4980 }
4981 break;
4982 case 0x38: /* ffreep sti, undocumented op */
4983 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4984 tcg_gen_helper_0_0(helper_fpop);
4985 break;
4986 case 0x3c: /* df/4 */
4987 switch(rm) {
4988 case 0:
4989 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4990 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4991 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4992 break;
4993 default:
4994 goto illegal_op;
4995 }
4996 break;
4997 case 0x3d: /* fucomip */
4998 if (s->cc_op != CC_OP_DYNAMIC)
4999 gen_op_set_cc_op(s->cc_op);
5000 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5001 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5002 tcg_gen_helper_0_0(helper_fpop);
5003 gen_op_fcomi_dummy();
5004 s->cc_op = CC_OP_EFLAGS;
5005 break;
5006 case 0x3e: /* fcomip */
5007 if (s->cc_op != CC_OP_DYNAMIC)
5008 gen_op_set_cc_op(s->cc_op);
5009 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5010 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5011 tcg_gen_helper_0_0(helper_fpop);
5012 gen_op_fcomi_dummy();
5013 s->cc_op = CC_OP_EFLAGS;
5014 break;
5015 case 0x10 ... 0x13: /* fcmovxx */
5016 case 0x18 ... 0x1b:
5017 {
5018 int op1, l1;
5019 static const uint8_t fcmov_cc[4] = {
5020 (JCC_B << 1),
5021 (JCC_Z << 1),
5022 (JCC_BE << 1),
5023 (JCC_P << 1),
5024 };
5025 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
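/* bit 0 of (op >> 3) selects the negated condition (the db forms);
   gen_setcc computes the condition into T0 and the register move
   is skipped when it is false */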
5026 gen_setcc(s, op1);
5027 l1 = gen_new_label();
5028 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5029 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5030 gen_set_label(l1);
5031 }
5032 break;
5033 default:
5034 goto illegal_op;
5035 }
5036 }
5037 break;
5038 /************************/
5039 /* string ops */
5040
5041 case 0xa4: /* movsS */
5042 case 0xa5:
5043 if ((b & 1) == 0)
5044 ot = OT_BYTE;
5045 else
5046 ot = dflag + OT_WORD;
5047
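/* movs does not test ZF, so repz and repnz both behave as plain rep */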
5048 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5049 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5050 } else {
5051 gen_movs(s, ot);
5052 }
5053 break;
5054
5055 case 0xaa: /* stosS */
5056 case 0xab:
5057 if ((b & 1) == 0)
5058 ot = OT_BYTE;
5059 else
5060 ot = dflag + OT_WORD;
5061
5062 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5063 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5064 } else {
5065 gen_stos(s, ot);
5066 }
5067 break;
5068 case 0xac: /* lodsS */
5069 case 0xad:
5070 if ((b & 1) == 0)
5071 ot = OT_BYTE;
5072 else
5073 ot = dflag + OT_WORD;
5074 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5075 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5076 } else {
5077 gen_lods(s, ot);
5078 }
5079 break;
5080 case 0xae: /* scasS */
5081 case 0xaf:
5082 if ((b & 1) == 0)
5083 ot = OT_BYTE;
5084 else
5085 ot = dflag + OT_WORD;
5086 if (prefixes & PREFIX_REPNZ) {
5087 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5088 } else if (prefixes & PREFIX_REPZ) {
5089 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5090 } else {
5091 gen_scas(s, ot);
5092 s->cc_op = CC_OP_SUBB + ot;
5093 }
5094 break;
5095
5096 case 0xa6: /* cmpsS */
5097 case 0xa7:
5098 if ((b & 1) == 0)
5099 ot = OT_BYTE;
5100 else
5101 ot = dflag + OT_WORD;
5102 if (prefixes & PREFIX_REPNZ) {
5103 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5104 } else if (prefixes & PREFIX_REPZ) {
5105 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5106 } else {
5107 gen_cmps(s, ot);
5108 s->cc_op = CC_OP_SUBB + ot;
5109 }
5110 break;
5111 case 0x6c: /* insS */
5112 case 0x6d:
5113 if ((b & 1) == 0)
5114 ot = OT_BYTE;
5115 else
5116 ot = dflag ? OT_LONG : OT_WORD;
5117 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5118 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5119 gen_op_andl_T0_ffff();
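/* SVM IOIO exit info (per the AMD APM layout): bit 0 = IN, bit 2 = string
   insn, bit 3 = REP, bits 4-6 = operand size, bits 7-9 = address size */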
5120 if (gen_svm_check_io(s, pc_start,
5121 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
5122 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
5123 break;
5124 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5125 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5126 } else {
5127 gen_ins(s, ot);
5128 }
5129 break;
5130 case 0x6e: /* outsS */
5131 case 0x6f:
5132 if ((b & 1) == 0)
5133 ot = OT_BYTE;
5134 else
5135 ot = dflag ? OT_LONG : OT_WORD;
5136 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5137 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5138 gen_op_andl_T0_ffff();
5139 if (gen_svm_check_io(s, pc_start,
5140 (1 << (4+ot)) | svm_is_rep(prefixes) |
5141 4 | (1 << (7+s->aflag))))
5142 break;
5143 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5144 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5145 } else {
5146 gen_outs(s, ot);
5147 }
5148 break;
5149
5150 /************************/
5151 /* port I/O */
5152
5153 case 0xe4:
5154 case 0xe5:
5155 if ((b & 1) == 0)
5156 ot = OT_BYTE;
5157 else
5158 ot = dflag ? OT_LONG : OT_WORD;
5159 val = ldub_code(s->pc++);
5160 gen_op_movl_T0_im(val);
5161 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5162 if (gen_svm_check_io(s, pc_start,
5163 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5164 (1 << (4+ot))))
5165 break;
5166 gen_op_in[ot]();
5167 gen_op_mov_reg_T1(ot, R_EAX);
5168 break;
5169 case 0xe6:
5170 case 0xe7:
5171 if ((b & 1) == 0)
5172 ot = OT_BYTE;
5173 else
5174 ot = dflag ? OT_LONG : OT_WORD;
5175 val = ldub_code(s->pc++);
5176 gen_op_movl_T0_im(val);
5177 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5178 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5179 (1 << (4+ot))))
5180 break;
5181 gen_op_mov_TN_reg(ot, 1, R_EAX);
5182 gen_op_out[ot]();
5183 break;
5184 case 0xec:
5185 case 0xed:
5186 if ((b & 1) == 0)
5187 ot = OT_BYTE;
5188 else
5189 ot = dflag ? OT_LONG : OT_WORD;
5190 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5191 gen_op_andl_T0_ffff();
5192 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5193 if (gen_svm_check_io(s, pc_start,
5194 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5195 (1 << (4+ot))))
5196 break;
5197 gen_op_in[ot]();
5198 gen_op_mov_reg_T1(ot, R_EAX);
5199 break;
5200 case 0xee:
5201 case 0xef:
5202 if ((b & 1) == 0)
5203 ot = OT_BYTE;
5204 else
5205 ot = dflag ? OT_LONG : OT_WORD;
5206 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5207 gen_op_andl_T0_ffff();
5208 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5209 if (gen_svm_check_io(s, pc_start,
5210 svm_is_rep(prefixes) | (1 << (4+ot))))
5211 break;
5212 gen_op_mov_TN_reg(ot, 1, R_EAX);
5213 gen_op_out[ot]();
5214 break;
5215
5216 /************************/
5217 /* control */
5218 case 0xc2: /* ret im */
5219 val = lduw_code(s->pc); /* the imm16 stack adjustment is unsigned */
5220 s->pc += 2;
5221 gen_pop_T0(s);
5222 if (CODE64(s) && s->dflag)
5223 s->dflag = 2;
5224 gen_stack_update(s, val + (2 << s->dflag));
5225 if (s->dflag == 0)
5226 gen_op_andl_T0_ffff();
5227 gen_op_jmp_T0();
5228 gen_eob(s);
5229 break;
5230 case 0xc3: /* ret */
5231 gen_pop_T0(s);
5232 gen_pop_update(s);
5233 if (s->dflag == 0)
5234 gen_op_andl_T0_ffff();
5235 gen_op_jmp_T0();
5236 gen_eob(s);
5237 break;
5238 case 0xca: /* lret im */
5239 val = lduw_code(s->pc); /* the imm16 stack adjustment is unsigned */
5240 s->pc += 2;
5241 do_lret:
5242 if (s->pe && !s->vm86) {
5243 if (s->cc_op != CC_OP_DYNAMIC)
5244 gen_op_set_cc_op(s->cc_op);
5245 gen_jmp_im(pc_start - s->cs_base);
5246 gen_op_lret_protected(s->dflag, val);
5247 } else {
5248 gen_stack_A0(s);
5249 /* pop offset */
5250 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5251 if (s->dflag == 0)
5252 gen_op_andl_T0_ffff();
5253 /* NOTE: keeping EIP updated is not a problem in case of
5254 exception */
5255 gen_op_jmp_T0();
5256 /* pop selector */
5257 gen_op_addl_A0_im(2 << s->dflag);
5258 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5259 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5260 /* add stack offset */
5261 gen_stack_update(s, val + (4 << s->dflag));
5262 }
5263 gen_eob(s);
5264 break;
5265 case 0xcb: /* lret */
5266 val = 0;
5267 goto do_lret;
5268 case 0xcf: /* iret */
5269 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5270 break;
5271 if (!s->pe) {
5272 /* real mode */
5273 gen_op_iret_real(s->dflag);
5274 s->cc_op = CC_OP_EFLAGS;
5275 } else if (s->vm86) {
5276 if (s->iopl != 3) {
5277 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5278 } else {
5279 gen_op_iret_real(s->dflag);
5280 s->cc_op = CC_OP_EFLAGS;
5281 }
5282 } else {
5283 if (s->cc_op != CC_OP_DYNAMIC)
5284 gen_op_set_cc_op(s->cc_op);
5285 gen_jmp_im(pc_start - s->cs_base);
5286 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5287 s->cc_op = CC_OP_EFLAGS;
5288 }
5289 gen_eob(s);
5290 break;
5291 case 0xe8: /* call im */
5292 {
5293 if (dflag)
5294 tval = (int32_t)insn_get(s, OT_LONG);
5295 else
5296 tval = (int16_t)insn_get(s, OT_WORD);
5297 next_eip = s->pc - s->cs_base;
5298 tval += next_eip;
5299 if (s->dflag == 0)
5300 tval &= 0xffff;
5301 gen_movtl_T0_im(next_eip);
5302 gen_push_T0(s);
5303 gen_jmp(s, tval);
5304 }
5305 break;
5306 case 0x9a: /* lcall im */
5307 {
5308 unsigned int selector, offset;
5309
5310 if (CODE64(s))
5311 goto illegal_op;
5312 ot = dflag ? OT_LONG : OT_WORD;
5313 offset = insn_get(s, ot);
5314 selector = insn_get(s, OT_WORD);
5315
5316 gen_op_movl_T0_im(selector);
5317 gen_op_movl_T1_imu(offset);
5318 }
5319 goto do_lcall;
5320 case 0xe9: /* jmp im */
5321 if (dflag)
5322 tval = (int32_t)insn_get(s, OT_LONG);
5323 else
5324 tval = (int16_t)insn_get(s, OT_WORD);
5325 tval += s->pc - s->cs_base;
5326 if (s->dflag == 0)
5327 tval &= 0xffff;
5328 gen_jmp(s, tval);
5329 break;
5330 case 0xea: /* ljmp im */
5331 {
5332 unsigned int selector, offset;
5333
5334 if (CODE64(s))
5335 goto illegal_op;
5336 ot = dflag ? OT_LONG : OT_WORD;
5337 offset = insn_get(s, ot);
5338 selector = insn_get(s, OT_WORD);
5339
5340 gen_op_movl_T0_im(selector);
5341 gen_op_movl_T1_imu(offset);
5342 }
5343 goto do_ljmp;
5344 case 0xeb: /* jmp Jb */
5345 tval = (int8_t)insn_get(s, OT_BYTE);
5346 tval += s->pc - s->cs_base;
5347 if (s->dflag == 0)
5348 tval &= 0xffff;
5349 gen_jmp(s, tval);
5350 break;
5351 case 0x70 ... 0x7f: /* jcc Jb */
5352 tval = (int8_t)insn_get(s, OT_BYTE);
5353 goto do_jcc;
5354 case 0x180 ... 0x18f: /* jcc Jv */
5355 if (dflag) {
5356 tval = (int32_t)insn_get(s, OT_LONG);
5357 } else {
5358 tval = (int16_t)insn_get(s, OT_WORD);
5359 }
5360 do_jcc:
5361 next_eip = s->pc - s->cs_base;
5362 tval += next_eip;
5363 if (s->dflag == 0)
5364 tval &= 0xffff;
5365 gen_jcc(s, b, tval, next_eip);
5366 break;
5367
5368 case 0x190 ... 0x19f: /* setcc Gv */
5369 modrm = ldub_code(s->pc++);
5370 gen_setcc(s, b);
5371 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5372 break;
5373 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5374 ot = dflag + OT_WORD;
5375 modrm = ldub_code(s->pc++);
5376 reg = ((modrm >> 3) & 7) | rex_r;
5377 mod = (modrm >> 6) & 3;
5378 gen_setcc(s, b);
5379 if (mod != 3) {
5380 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5381 gen_op_ld_T1_A0(ot + s->mem_index);
5382 } else {
5383 rm = (modrm & 7) | REX_B(s);
5384 gen_op_mov_TN_reg(ot, 1, rm);
5385 }
5386 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5387 break;
5388
5389 /************************/
5390 /* flags */
5391 case 0x9c: /* pushf */
5392 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5393 break;
5394 if (s->vm86 && s->iopl != 3) {
5395 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5396 } else {
5397 if (s->cc_op != CC_OP_DYNAMIC)
5398 gen_op_set_cc_op(s->cc_op);
5399 gen_op_movl_T0_eflags();
5400 gen_push_T0(s);
5401 }
5402 break;
5403 case 0x9d: /* popf */
5404 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5405 break;
5406 if (s->vm86 && s->iopl != 3) {
5407 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5408 } else {
5409 gen_pop_T0(s);
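/* which eflags bits are writable depends on privilege: at CPL 0
   IOPL may also change, at CPL <= IOPL IF may also change */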
5410 if (s->cpl == 0) {
5411 if (s->dflag) {
5412 gen_op_movl_eflags_T0_cpl0();
5413 } else {
5414 gen_op_movw_eflags_T0_cpl0();
5415 }
5416 } else {
5417 if (s->cpl <= s->iopl) {
5418 if (s->dflag) {
5419 gen_op_movl_eflags_T0_io();
5420 } else {
5421 gen_op_movw_eflags_T0_io();
5422 }
5423 } else {
5424 if (s->dflag) {
5425 gen_op_movl_eflags_T0();
5426 } else {
5427 gen_op_movw_eflags_T0();
5428 }
5429 }
5430 }
5431 gen_pop_update(s);
5432 s->cc_op = CC_OP_EFLAGS;
5433 /* abort translation because TF flag may change */
5434 gen_jmp_im(s->pc - s->cs_base);
5435 gen_eob(s);
5436 }
5437 break;
5438 case 0x9e: /* sahf */
5439 if (CODE64(s))
5440 goto illegal_op;
5441 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5442 if (s->cc_op != CC_OP_DYNAMIC)
5443 gen_op_set_cc_op(s->cc_op);
5444 gen_op_movb_eflags_T0();
5445 s->cc_op = CC_OP_EFLAGS;
5446 break;
5447 case 0x9f: /* lahf */
5448 if (CODE64(s))
5449 goto illegal_op;
5450 if (s->cc_op != CC_OP_DYNAMIC)
5451 gen_op_set_cc_op(s->cc_op);
5452 gen_op_movl_T0_eflags();
5453 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5454 break;
5455 case 0xf5: /* cmc */
5456 if (s->cc_op != CC_OP_DYNAMIC)
5457 gen_op_set_cc_op(s->cc_op);
5458 gen_op_cmc();
5459 s->cc_op = CC_OP_EFLAGS;
5460 break;
5461 case 0xf8: /* clc */
5462 if (s->cc_op != CC_OP_DYNAMIC)
5463 gen_op_set_cc_op(s->cc_op);
5464 gen_op_clc();
5465 s->cc_op = CC_OP_EFLAGS;
5466 break;
5467 case 0xf9: /* stc */
5468 if (s->cc_op != CC_OP_DYNAMIC)
5469 gen_op_set_cc_op(s->cc_op);
5470 gen_op_stc();
5471 s->cc_op = CC_OP_EFLAGS;
5472 break;
5473 case 0xfc: /* cld */
5474 gen_op_cld();
5475 break;
5476 case 0xfd: /* std */
5477 gen_op_std();
5478 break;
5479
5480 /************************/
5481 /* bit operations */
5482 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5483 ot = dflag + OT_WORD;
5484 modrm = ldub_code(s->pc++);
5485 op = (modrm >> 3) & 7;
5486 mod = (modrm >> 6) & 3;
5487 rm = (modrm & 7) | REX_B(s);
5488 if (mod != 3) {
5489 s->rip_offset = 1;
5490 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5491 gen_op_ld_T0_A0(ot + s->mem_index);
5492 } else {
5493 gen_op_mov_TN_reg(ot, 0, rm);
5494 }
5495 /* load shift */
5496 val = ldub_code(s->pc++);
5497 gen_op_movl_T1_im(val);
5498 if (op < 4)
5499 goto illegal_op;
5500 op -= 4;
5501 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5502 s->cc_op = CC_OP_SARB + ot;
5503 if (op != 0) {
5504 if (mod != 3)
5505 gen_op_st_T0_A0(ot + s->mem_index);
5506 else
5507 gen_op_mov_reg_T0(ot, rm);
5508 gen_op_update_bt_cc();
5509 }
5510 break;
5511 case 0x1a3: /* bt Gv, Ev */
5512 op = 0;
5513 goto do_btx;
5514 case 0x1ab: /* bts */
5515 op = 1;
5516 goto do_btx;
5517 case 0x1b3: /* btr */
5518 op = 2;
5519 goto do_btx;
5520 case 0x1bb: /* btc */
5521 op = 3;
5522 do_btx:
5523 ot = dflag + OT_WORD;
5524 modrm = ldub_code(s->pc++);
5525 reg = ((modrm >> 3) & 7) | rex_r;
5526 mod = (modrm >> 6) & 3;
5527 rm = (modrm & 7) | REX_B(s);
5528 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5529 if (mod != 3) {
5530 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5531 /* the bit offset may reach outside the operand: add the offset of the word that contains the selected bit */
5532 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5533 gen_op_ld_T0_A0(ot + s->mem_index);
5534 } else {
5535 gen_op_mov_TN_reg(ot, 0, rm);
5536 }
5537 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5538 s->cc_op = CC_OP_SARB + ot;
5539 if (op != 0) {
5540 if (mod != 3)
5541 gen_op_st_T0_A0(ot + s->mem_index);
5542 else
5543 gen_op_mov_reg_T0(ot, rm);
5544 gen_op_update_bt_cc();
5545 }
5546 break;
5547 case 0x1bc: /* bsf */
5548 case 0x1bd: /* bsr */
5549 ot = dflag + OT_WORD;
5550 modrm = ldub_code(s->pc++);
5551 reg = ((modrm >> 3) & 7) | rex_r;
5552 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5553 /* NOTE: in order to handle the 0 case, we must load the
5554 result. It could be optimized with a generated jump */
5555 gen_op_mov_TN_reg(ot, 1, reg);
5556 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5557 gen_op_mov_reg_T1(ot, reg);
5558 s->cc_op = CC_OP_LOGICB + ot;
5559 break;
5560 /************************/
5561 /* bcd */
5562 case 0x27: /* daa */
5563 if (CODE64(s))
5564 goto illegal_op;
5565 if (s->cc_op != CC_OP_DYNAMIC)
5566 gen_op_set_cc_op(s->cc_op);
5567 gen_op_daa();
5568 s->cc_op = CC_OP_EFLAGS;
5569 break;
5570 case 0x2f: /* das */
5571 if (CODE64(s))
5572 goto illegal_op;
5573 if (s->cc_op != CC_OP_DYNAMIC)
5574 gen_op_set_cc_op(s->cc_op);
5575 gen_op_das();
5576 s->cc_op = CC_OP_EFLAGS;
5577 break;
5578 case 0x37: /* aaa */
5579 if (CODE64(s))
5580 goto illegal_op;
5581 if (s->cc_op != CC_OP_DYNAMIC)
5582 gen_op_set_cc_op(s->cc_op);
5583 gen_op_aaa();
5584 s->cc_op = CC_OP_EFLAGS;
5585 break;
5586 case 0x3f: /* aas */
5587 if (CODE64(s))
5588 goto illegal_op;
5589 if (s->cc_op != CC_OP_DYNAMIC)
5590 gen_op_set_cc_op(s->cc_op);
5591 gen_op_aas();
5592 s->cc_op = CC_OP_EFLAGS;
5593 break;
5594 case 0xd4: /* aam */
5595 if (CODE64(s))
5596 goto illegal_op;
5597 val = ldub_code(s->pc++);
5598 if (val == 0) {
5599 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5600 } else {
5601 gen_op_aam(val);
5602 s->cc_op = CC_OP_LOGICB;
5603 }
5604 break;
5605 case 0xd5: /* aad */
5606 if (CODE64(s))
5607 goto illegal_op;
5608 val = ldub_code(s->pc++);
5609 gen_op_aad(val);
5610 s->cc_op = CC_OP_LOGICB;
5611 break;
5612 /************************/
5613 /* misc */
5614 case 0x90: /* nop */
5615 /* XXX: xchg + rex handling */
5616 /* XXX: correct lock test for all insn */
5617 if (prefixes & PREFIX_LOCK)
5618 goto illegal_op;
5619 if (prefixes & PREFIX_REPZ) {
5620 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5621 }
5622 break;
5623 case 0x9b: /* fwait */
5624 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5625 (HF_MP_MASK | HF_TS_MASK)) {
5626 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5627 } else {
5628 if (s->cc_op != CC_OP_DYNAMIC)
5629 gen_op_set_cc_op(s->cc_op);
5630 gen_jmp_im(pc_start - s->cs_base);
5631 tcg_gen_helper_0_0(helper_fwait);
5632 }
5633 break;
5634 case 0xcc: /* int3 */
5635 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5636 break;
5637 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5638 break;
5639 case 0xcd: /* int N */
5640 val = ldub_code(s->pc++);
5641 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5642 break;
5643 if (s->vm86 && s->iopl != 3) {
5644 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5645 } else {
5646 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5647 }
5648 break;
5649 case 0xce: /* into */
5650 if (CODE64(s))
5651 goto illegal_op;
5652 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5653 break;
5654 if (s->cc_op != CC_OP_DYNAMIC)
5655 gen_op_set_cc_op(s->cc_op);
5656 gen_jmp_im(pc_start - s->cs_base);
5657 gen_op_into(s->pc - pc_start);
5658 break;
5659 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5660 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5661 break;
5662 #if 1
5663 gen_debug(s, pc_start - s->cs_base);
5664 #else
5665 /* start debug */
5666 tb_flush(cpu_single_env);
5667 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5668 #endif
5669 break;
5670 case 0xfa: /* cli */
5671 if (!s->vm86) {
5672 if (s->cpl <= s->iopl) {
5673 gen_op_cli();
5674 } else {
5675 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5676 }
5677 } else {
5678 if (s->iopl == 3) {
5679 gen_op_cli();
5680 } else {
5681 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5682 }
5683 }
5684 break;
5685 case 0xfb: /* sti */
5686 if (!s->vm86) {
5687 if (s->cpl <= s->iopl) {
5688 gen_sti:
5689 gen_op_sti();
5690 /* interrupts are recognized only starting one insn after sti */
5691 /* if several consecutive insns inhibit interrupts, only the
5692 first one needs to set the inhibit flag */
5693 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5694 gen_op_set_inhibit_irq();
5695 /* give a chance to handle pending irqs */
5696 gen_jmp_im(s->pc - s->cs_base);
5697 gen_eob(s);
5698 } else {
5699 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5700 }
5701 } else {
5702 if (s->iopl == 3) {
5703 goto gen_sti;
5704 } else {
5705 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5706 }
5707 }
5708 break;
5709 case 0x62: /* bound */
5710 if (CODE64(s))
5711 goto illegal_op;
5712 ot = dflag ? OT_LONG : OT_WORD;
5713 modrm = ldub_code(s->pc++);
5714 reg = (modrm >> 3) & 7;
5715 mod = (modrm >> 6) & 3;
5716 if (mod == 3)
5717 goto illegal_op;
5718 gen_op_mov_TN_reg(ot, 0, reg);
5719 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5720 gen_jmp_im(pc_start - s->cs_base);
5721 if (ot == OT_WORD)
5722 gen_op_boundw();
5723 else
5724 gen_op_boundl();
5725 break;
5726 case 0x1c8 ... 0x1cf: /* bswap reg */
5727 reg = (b & 7) | REX_B(s);
5728 #ifdef TARGET_X86_64
5729 if (dflag == 2) {
5730 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5731 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5732 gen_op_mov_reg_T0(OT_QUAD, reg);
5733 } else
5734 {
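/* 32-bit bswap on the 64-bit target: swap the low 32 bits; the zero
   extension matches the hardware behaviour of 32-bit register writes */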
5735 TCGv tmp0;
5736 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5737
5738 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5739 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5740 tcg_gen_bswap_i32(tmp0, tmp0);
5741 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5742 gen_op_mov_reg_T0(OT_LONG, reg);
5743 }
5744 #else
5745 {
5746 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5747 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5748 gen_op_mov_reg_T0(OT_LONG, reg);
5749 }
5750 #endif
5751 break;
5752 case 0xd6: /* salc */
5753 if (CODE64(s))
5754 goto illegal_op;
5755 if (s->cc_op != CC_OP_DYNAMIC)
5756 gen_op_set_cc_op(s->cc_op);
5757 gen_op_salc();
5758 break;
5759 case 0xe0: /* loopnz */
5760 case 0xe1: /* loopz */
5761 if (s->cc_op != CC_OP_DYNAMIC)
5762 gen_op_set_cc_op(s->cc_op);
5763 /* FALL THRU */
5764 case 0xe2: /* loop */
5765 case 0xe3: /* jecxz */
5766 {
5767 int l1, l2;
5768
5769 tval = (int8_t)insn_get(s, OT_BYTE);
5770 next_eip = s->pc - s->cs_base;
5771 tval += next_eip;
5772 if (s->dflag == 0)
5773 tval &= 0xffff;
5774
5775 l1 = gen_new_label();
5776 l2 = gen_new_label();
5777 b &= 3;
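/* b selects the variant: 0 = loopnz, 1 = loopz, 2 = loop, 3 = jecxz */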
5778 if (b == 3) {
5779 gen_op_jz_ecx[s->aflag](l1);
5780 } else {
5781 gen_op_dec_ECX[s->aflag]();
5782 if (b <= 1)
5783 gen_op_mov_T0_cc();
5784 gen_op_loop[s->aflag][b](l1);
5785 }
5786
5787 gen_jmp_im(next_eip);
5788 gen_op_jmp_label(l2);
5789 gen_set_label(l1);
5790 gen_jmp_im(tval);
5791 gen_set_label(l2);
5792 gen_eob(s);
5793 }
5794 break;
5795 case 0x130: /* wrmsr */
5796 case 0x132: /* rdmsr */
5797 if (s->cpl != 0) {
5798 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5799 } else {
5800 int retval = 0;
5801 if (b & 2) {
5802 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5803 gen_op_rdmsr();
5804 } else {
5805 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5806 gen_op_wrmsr();
5807 }
5808 if (retval)
5809 gen_eob(s);
5810 }
5811 break;
5812 case 0x131: /* rdtsc */
5813 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5814 break;
5815 gen_jmp_im(pc_start - s->cs_base);
5816 gen_op_rdtsc();
5817 break;
5818 case 0x133: /* rdpmc */
5819 gen_jmp_im(pc_start - s->cs_base);
5820 gen_op_rdpmc();
5821 break;
5822 case 0x134: /* sysenter */
5823 if (CODE64(s))
5824 goto illegal_op;
5825 if (!s->pe) {
5826 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5827 } else {
5828 if (s->cc_op != CC_OP_DYNAMIC) {
5829 gen_op_set_cc_op(s->cc_op);
5830 s->cc_op = CC_OP_DYNAMIC;
5831 }
5832 gen_jmp_im(pc_start - s->cs_base);
5833 gen_op_sysenter();
5834 gen_eob(s);
5835 }
5836 break;
5837 case 0x135: /* sysexit */
5838 if (CODE64(s))
5839 goto illegal_op;
5840 if (!s->pe) {
5841 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5842 } else {
5843 if (s->cc_op != CC_OP_DYNAMIC) {
5844 gen_op_set_cc_op(s->cc_op);
5845 s->cc_op = CC_OP_DYNAMIC;
5846 }
5847 gen_jmp_im(pc_start - s->cs_base);
5848 gen_op_sysexit();
5849 gen_eob(s);
5850 }
5851 break;
5852 #ifdef TARGET_X86_64
5853 case 0x105: /* syscall */
5854 /* XXX: is it usable in real mode ? */
5855 if (s->cc_op != CC_OP_DYNAMIC) {
5856 gen_op_set_cc_op(s->cc_op);
5857 s->cc_op = CC_OP_DYNAMIC;
5858 }
5859 gen_jmp_im(pc_start - s->cs_base);
5860 gen_op_syscall(s->pc - pc_start);
5861 gen_eob(s);
5862 break;
5863 case 0x107: /* sysret */
5864 if (!s->pe) {
5865 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5866 } else {
5867 if (s->cc_op != CC_OP_DYNAMIC) {
5868 gen_op_set_cc_op(s->cc_op);
5869 s->cc_op = CC_OP_DYNAMIC;
5870 }
5871 gen_jmp_im(pc_start - s->cs_base);
5872 gen_op_sysret(s->dflag);
5873 /* condition codes are modified only in long mode */
5874 if (s->lma)
5875 s->cc_op = CC_OP_EFLAGS;
5876 gen_eob(s);
5877 }
5878 break;
5879 #endif
5880 case 0x1a2: /* cpuid */
5881 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5882 break;
5883 gen_op_cpuid();
5884 break;
5885 case 0xf4: /* hlt */
5886 if (s->cpl != 0) {
5887 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5888 } else {
5889 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5890 break;
5891 if (s->cc_op != CC_OP_DYNAMIC)
5892 gen_op_set_cc_op(s->cc_op);
5893 gen_jmp_im(s->pc - s->cs_base);
5894 gen_op_hlt();
5895 s->is_jmp = 3;
5896 }
5897 break;
5898 case 0x100:
5899 modrm = ldub_code(s->pc++);
5900 mod = (modrm >> 6) & 3;
5901 op = (modrm >> 3) & 7;
5902 switch(op) {
5903 case 0: /* sldt */
5904 if (!s->pe || s->vm86)
5905 goto illegal_op;
5906 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5907 break;
5908 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5909 ot = OT_WORD;
5910 if (mod == 3)
5911 ot += s->dflag;
5912 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5913 break;
5914 case 2: /* lldt */
5915 if (!s->pe || s->vm86)
5916 goto illegal_op;
5917 if (s->cpl != 0) {
5918 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5919 } else {
5920 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5921 break;
5922 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5923 gen_jmp_im(pc_start - s->cs_base);
5924 gen_op_lldt_T0();
5925 }
5926 break;
5927 case 1: /* str */
5928 if (!s->pe || s->vm86)
5929 goto illegal_op;
5930 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5931 break;
5932 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5933 ot = OT_WORD;
5934 if (mod == 3)
5935 ot += s->dflag;
5936 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5937 break;
5938 case 3: /* ltr */
5939 if (!s->pe || s->vm86)
5940 goto illegal_op;
5941 if (s->cpl != 0) {
5942 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5943 } else {
5944 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5945 break;
5946 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5947 gen_jmp_im(pc_start - s->cs_base);
5948 gen_op_ltr_T0();
5949 }
5950 break;
5951 case 4: /* verr */
5952 case 5: /* verw */
5953 if (!s->pe || s->vm86)
5954 goto illegal_op;
5955 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5956 if (s->cc_op != CC_OP_DYNAMIC)
5957 gen_op_set_cc_op(s->cc_op);
5958 if (op == 4)
5959 gen_op_verr();
5960 else
5961 gen_op_verw();
5962 s->cc_op = CC_OP_EFLAGS;
5963 break;
5964 default:
5965 goto illegal_op;
5966 }
5967 break;
5968 case 0x101:
5969 modrm = ldub_code(s->pc++);
5970 mod = (modrm >> 6) & 3;
5971 op = (modrm >> 3) & 7;
5972 rm = modrm & 7;
5973 switch(op) {
5974 case 0: /* sgdt */
5975 if (mod == 3)
5976 goto illegal_op;
5977 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5978 break;
5979 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5980 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5981 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5982 gen_add_A0_im(s, 2);
5983 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
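/* with a 16-bit operand size only 24 bits of the base are stored */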
5984 if (!s->dflag)
5985 gen_op_andl_T0_im(0xffffff);
5986 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5987 break;
5988 case 1:
5989 if (mod == 3) {
5990 switch (rm) {
5991 case 0: /* monitor */
5992 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5993 s->cpl != 0)
5994 goto illegal_op;
5995 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5996 break;
5997 gen_jmp_im(pc_start - s->cs_base);
5998 #ifdef TARGET_X86_64
5999 if (s->aflag == 2) {
6000 /* the monitored address is DS:rAX, not EBX + AL as in xlat */
6001 gen_op_movq_A0_reg(R_EAX);
6002 } else
6003 #endif
6004 {
6005 gen_op_movl_A0_reg(R_EAX);
6007 if (s->aflag == 0)
6008 gen_op_andl_A0_ffff();
6009 }
6010 gen_add_A0_ds_seg(s);
6011 gen_op_monitor();
6012 break;
6013 case 1: /* mwait */
6014 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6015 s->cpl != 0)
6016 goto illegal_op;
6017 if (s->cc_op != CC_OP_DYNAMIC) {
6018 gen_op_set_cc_op(s->cc_op);
6019 s->cc_op = CC_OP_DYNAMIC;
6020 }
6021 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6022 break;
6023 gen_jmp_im(s->pc - s->cs_base);
6024 gen_op_mwait();
6025 gen_eob(s);
6026 break;
6027 default:
6028 goto illegal_op;
6029 }
6030 } else { /* sidt */
6031 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6032 break;
6033 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6034 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6035 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6036 gen_add_A0_im(s, 2);
6037 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6038 if (!s->dflag)
6039 gen_op_andl_T0_im(0xffffff);
6040 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6041 }
6042 break;
6043 case 2: /* lgdt */
6044 case 3: /* lidt */
6045 if (mod == 3) {
6046 switch(rm) {
6047 case 0: /* VMRUN */
6048 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6049 break;
6050 if (s->cc_op != CC_OP_DYNAMIC)
6051 gen_op_set_cc_op(s->cc_op);
6052 gen_jmp_im(s->pc - s->cs_base);
6053 gen_op_vmrun();
6054 s->cc_op = CC_OP_EFLAGS;
6055 gen_eob(s);
6056 break;
6057 case 1: /* VMMCALL */
6058 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6059 break;
6060 /* FIXME: cause #UD if hflags & SVM */
6061 gen_op_vmmcall();
6062 break;
6063 case 2: /* VMLOAD */
6064 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6065 break;
6066 gen_op_vmload();
6067 break;
6068 case 3: /* VMSAVE */
6069 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6070 break;
6071 gen_op_vmsave();
6072 break;
6073 case 4: /* STGI */
6074 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6075 break;
6076 gen_op_stgi();
6077 break;
6078 case 5: /* CLGI */
6079 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6080 break;
6081 gen_op_clgi();
6082 break;
6083 case 6: /* SKINIT */
6084 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6085 break;
6086 gen_op_skinit();
6087 break;
6088 case 7: /* INVLPGA */
6089 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6090 break;
6091 gen_op_invlpga();
6092 break;
6093 default:
6094 goto illegal_op;
6095 }
6096 } else if (s->cpl != 0) {
6097 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6098 } else {
6099 if (gen_svm_check_intercept(s, pc_start,
6100 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6101 break;
6102 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6103 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6104 gen_add_A0_im(s, 2);
6105 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6106 if (!s->dflag)
6107 gen_op_andl_T0_im(0xffffff);
6108 if (op == 2) {
6109 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6110 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6111 } else {
6112 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6113 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6114 }
6115 }
6116 break;
6117 case 4: /* smsw */
6118 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6119 break;
6120 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6121 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6122 break;
6123 case 6: /* lmsw */
6124 if (s->cpl != 0) {
6125 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6126 } else {
6127 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6128 break;
6129 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6130 gen_op_lmsw_T0();
6131 gen_jmp_im(s->pc - s->cs_base);
6132 gen_eob(s);
6133 }
6134 break;
6135 case 7: /* invlpg */
6136 if (s->cpl != 0) {
6137 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6138 } else {
6139 if (mod == 3) {
6140 #ifdef TARGET_X86_64
6141 if (CODE64(s) && rm == 0) {
6142 /* swapgs */
6143 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6144 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6145 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6146 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6147 } else
6148 #endif
6149 {
6150 goto illegal_op;
6151 }
6152 } else {
6153 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6154 break;
6155 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6156 gen_op_invlpg_A0();
6157 gen_jmp_im(s->pc - s->cs_base);
6158 gen_eob(s);
6159 }
6160 }
6161 break;
6162 default:
6163 goto illegal_op;
6164 }
6165 break;
6166 case 0x108: /* invd */
6167 case 0x109: /* wbinvd */
6168 if (s->cpl != 0) {
6169 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6170 } else {
6171 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6172 break;
6173 /* nothing to do */
6174 }
6175 break;
6176 case 0x63: /* arpl or movslS (x86_64) */
6177 #ifdef TARGET_X86_64
6178 if (CODE64(s)) {
6179 int d_ot;
6180 /* d_ot is the size of the destination */
6181 d_ot = dflag + OT_WORD;
6182
6183 modrm = ldub_code(s->pc++);
6184 reg = ((modrm >> 3) & 7) | rex_r;
6185 mod = (modrm >> 6) & 3;
6186 rm = (modrm & 7) | REX_B(s);
6187
6188 if (mod == 3) {
6189 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6190 /* sign extend */
6191 if (d_ot == OT_QUAD)
6192 gen_op_movslq_T0_T0();
6193 gen_op_mov_reg_T0(d_ot, reg);
6194 } else {
6195 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6196 if (d_ot == OT_QUAD) {
6197 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6198 } else {
6199 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6200 }
6201 gen_op_mov_reg_T0(d_ot, reg);
6202 }
6203 } else
6204 #endif
6205 {
6206 if (!s->pe || s->vm86)
6207 goto illegal_op;
6208 ot = dflag ? OT_LONG : OT_WORD;
6209 modrm = ldub_code(s->pc++);
6210 reg = (modrm >> 3) & 7;
6211 mod = (modrm >> 6) & 3;
6212 rm = modrm & 7;
6213 if (mod != 3) {
6214 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6215 gen_op_ld_T0_A0(ot + s->mem_index);
6216 } else {
6217 gen_op_mov_TN_reg(ot, 0, rm);
6218 }
6219 if (s->cc_op != CC_OP_DYNAMIC)
6220 gen_op_set_cc_op(s->cc_op);
6221 gen_op_arpl();
6222 s->cc_op = CC_OP_EFLAGS;
6223 if (mod != 3) {
6224 gen_op_st_T0_A0(ot + s->mem_index);
6225 } else {
6226 gen_op_mov_reg_T0(ot, rm);
6227 }
6228 gen_op_arpl_update();
6229 }
6230 break;
6231 case 0x102: /* lar */
6232 case 0x103: /* lsl */
6233 if (!s->pe || s->vm86)
6234 goto illegal_op;
6235 ot = dflag ? OT_LONG : OT_WORD;
6236 modrm = ldub_code(s->pc++);
6237 reg = ((modrm >> 3) & 7) | rex_r;
6238 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6239 gen_op_mov_TN_reg(ot, 1, reg);
6240 if (s->cc_op != CC_OP_DYNAMIC)
6241 gen_op_set_cc_op(s->cc_op);
6242 if (b == 0x102)
6243 gen_op_lar();
6244 else
6245 gen_op_lsl();
6246 s->cc_op = CC_OP_EFLAGS;
6247 gen_op_mov_reg_T1(ot, reg);
6248 break;
6249 case 0x118:
6250 modrm = ldub_code(s->pc++);
6251 mod = (modrm >> 6) & 3;
6252 op = (modrm >> 3) & 7;
6253 switch(op) {
6254 case 0: /* prefetchnta */
6255 case 1: /* prefetcht0 */
6256 case 2: /* prefetcht1 */
6257 case 3: /* prefetcht2 */
6258 if (mod == 3)
6259 goto illegal_op;
6260 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6261 /* nothing more to do */
6262 break;
6263 default: /* nop (multi byte) */
6264 gen_nop_modrm(s, modrm);
6265 break;
6266 }
6267 break;
6268 case 0x119 ... 0x11f: /* nop (multi byte) */
6269 modrm = ldub_code(s->pc++);
6270 gen_nop_modrm(s, modrm);
6271 break;
6272 case 0x120: /* mov reg, crN */
6273 case 0x122: /* mov crN, reg */
6274 if (s->cpl != 0) {
6275 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6276 } else {
6277 modrm = ldub_code(s->pc++);
6278 if ((modrm & 0xc0) != 0xc0)
6279 goto illegal_op;
6280 rm = (modrm & 7) | REX_B(s);
6281 reg = ((modrm >> 3) & 7) | rex_r;
6282 if (CODE64(s))
6283 ot = OT_QUAD;
6284 else
6285 ot = OT_LONG;
6286 switch(reg) {
6287 case 0:
6288 case 2:
6289 case 3:
6290 case 4:
6291 case 8:
6292 if (b & 2) {
6293 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6294 gen_op_mov_TN_reg(ot, 0, rm);
6295 gen_op_movl_crN_T0(reg);
6296 gen_jmp_im(s->pc - s->cs_base);
6297 gen_eob(s);
6298 } else {
6299 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6300 #if !defined(CONFIG_USER_ONLY)
6301 if (reg == 8)
6302 gen_op_movtl_T0_cr8();
6303 else
6304 #endif
6305 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6306 gen_op_mov_reg_T0(ot, rm);
6307 }
6308 break;
6309 default:
6310 goto illegal_op;
6311 }
6312 }
6313 break;
6314 case 0x121: /* mov reg, drN */
6315 case 0x123: /* mov drN, reg */
6316 if (s->cpl != 0) {
6317 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6318 } else {
6319 modrm = ldub_code(s->pc++);
6320 if ((modrm & 0xc0) != 0xc0)
6321 goto illegal_op;
6322 rm = (modrm & 7) | REX_B(s);
6323 reg = ((modrm >> 3) & 7) | rex_r;
6324 if (CODE64(s))
6325 ot = OT_QUAD;
6326 else
6327 ot = OT_LONG;
6328 /* XXX: do it dynamically with CR4.DE bit */
6329 if (reg == 4 || reg == 5 || reg >= 8)
6330 goto illegal_op;
6331 if (b & 2) {
6332 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6333 gen_op_mov_TN_reg(ot, 0, rm);
6334 gen_op_movl_drN_T0(reg);
6335 gen_jmp_im(s->pc - s->cs_base);
6336 gen_eob(s);
6337 } else {
6338 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6339 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6340 gen_op_mov_reg_T0(ot, rm);
6341 }
6342 }
6343 break;
6344 case 0x106: /* clts */
6345 if (s->cpl != 0) {
6346 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6347 } else {
6348 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6349 gen_op_clts();
6350 /* abort block because static cpu state changed */
6351 gen_jmp_im(s->pc - s->cs_base);
6352 gen_eob(s);
6353 }
6354 break;
6355 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
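/* movnti is a non-temporal store hint; the emulator does not model
   the cache hierarchy, so it is treated as an ordinary store. */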
6356 case 0x1c3: /* MOVNTI reg, mem */
6357 if (!(s->cpuid_features & CPUID_SSE2))
6358 goto illegal_op;
6359 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6360 modrm = ldub_code(s->pc++);
6361 mod = (modrm >> 6) & 3;
6362 if (mod == 3)
6363 goto illegal_op;
6364 reg = ((modrm >> 3) & 7) | rex_r;
6365 /* generate a generic store */
6366 gen_ldst_modrm(s, modrm, ot, reg, 1);
6367 break;
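/* 0f ae group: fxsave/fxrstor, ldmxcsr/stmxcsr, the lfence/mfence/
   sfence memory fences and clflush, dispatched on the modrm reg
   field. */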
6368 case 0x1ae:
6369 modrm = ldub_code(s->pc++);
6370 mod = (modrm >> 6) & 3;
6371 op = (modrm >> 3) & 7;
6372 switch(op) {
6373 case 0: /* fxsave */
6374 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6375 (s->flags & HF_EM_MASK))
6376 goto illegal_op;
6377 if (s->flags & HF_TS_MASK) {
6378 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6379 break;
6380 }
6381 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6382 if (s->cc_op != CC_OP_DYNAMIC)
6383 gen_op_set_cc_op(s->cc_op);
6384 gen_jmp_im(pc_start - s->cs_base);
6385 tcg_gen_helper_0_2(helper_fxsave,
6386 cpu_A0, tcg_const_i32((s->dflag == 2)));
6387 break;
6388 case 1: /* fxrstor */
6389 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6390 (s->flags & HF_EM_MASK))
6391 goto illegal_op;
6392 if (s->flags & HF_TS_MASK) {
6393 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6394 break;
6395 }
6396 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6397 if (s->cc_op != CC_OP_DYNAMIC)
6398 gen_op_set_cc_op(s->cc_op);
6399 gen_jmp_im(pc_start - s->cs_base);
6400 tcg_gen_helper_0_2(helper_fxrstor,
6401 cpu_A0, tcg_const_i32((s->dflag == 2)));
6402 break;
6403 case 2: /* ldmxcsr */
6404 case 3: /* stmxcsr */
6405 if (s->flags & HF_TS_MASK) {
6406 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6407 break;
6408 }
6409 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6410 mod == 3)
6411 goto illegal_op;
6412 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6413 if (op == 2) {
6414 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6415 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6416 } else {
6417 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6418 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6419 }
6420 break;
6421 case 5: /* lfence */
6422 case 6: /* mfence */
6423 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6424 goto illegal_op;
6425 break;
6426 case 7: /* sfence / clflush */
6427 if ((modrm & 0xc7) == 0xc0) {
6428 /* sfence */
6429 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6430 if (!(s->cpuid_features & CPUID_SSE))
6431 goto illegal_op;
6432 } else {
6433 /* clflush */
6434 if (!(s->cpuid_features & CPUID_CLFLUSH))
6435 goto illegal_op;
6436 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6437 }
6438 break;
6439 default:
6440 goto illegal_op;
6441 }
6442 break;
6443 case 0x10d: /* 3DNow! prefetch(w) */
6444 modrm = ldub_code(s->pc++);
6445 mod = (modrm >> 6) & 3;
6446 if (mod == 3)
6447 goto illegal_op;
6448 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6449 /* ignore for now */
6450 break;
6451 case 0x1aa: /* rsm */
6452 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6453 break;
6454 if (!(s->flags & HF_SMM_MASK))
6455 goto illegal_op;
6456 if (s->cc_op != CC_OP_DYNAMIC) {
6457 gen_op_set_cc_op(s->cc_op);
6458 s->cc_op = CC_OP_DYNAMIC;
6459 }
6460 gen_jmp_im(s->pc - s->cs_base);
6461 gen_op_rsm();
6462 gen_eob(s);
6463 break;
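/* all remaining opcode ranges are MMX/SSE operations handled by the
   common gen_sse() decoder; the 3DNow! cases fall through to it
   after clearing the prefixes they ignore. */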
6464 case 0x10e ... 0x10f:
6465 /* 3DNow! instructions, ignore prefixes */
6466 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6467 case 0x110 ... 0x117:
6468 case 0x128 ... 0x12f:
6469 case 0x150 ... 0x177:
6470 case 0x17c ... 0x17f:
6471 case 0x1c2:
6472 case 0x1c4 ... 0x1c6:
6473 case 0x1d0 ... 0x1fe:
6474 gen_sse(s, b, pc_start, rex_r);
6475 break;
6476 default:
6477 goto illegal_op;
6478 }
6479 /* lock generation */
6480 if (s->prefix & PREFIX_LOCK)
6481 gen_op_unlock();
6482 return s->pc;
6483 illegal_op:
6484 if (s->prefix & PREFIX_LOCK)
6485 gen_op_unlock();
6486 /* XXX: ensure that no lock was generated */
6487 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6488 return s->pc;
6489 }
6490
6491 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6492 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6493
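/* The three tables below drive the flag liveness optimization in
   optimize_flags(): opc_read_flags lists the EFLAGS bits each micro op
   consumes, opc_write_flags the bits it produces and opc_simpler a
   cheaper equivalent to use when none of the produced bits are live. */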
6494 /* flags read by an operation */
6495 static uint16_t opc_read_flags[NB_OPS] = {
6496 [INDEX_op_aas] = CC_A,
6497 [INDEX_op_aaa] = CC_A,
6498 [INDEX_op_das] = CC_A | CC_C,
6499 [INDEX_op_daa] = CC_A | CC_C,
6500
6501 /* subtle: due to the incl/decl implementation, C is used */
6502 [INDEX_op_update_inc_cc] = CC_C,
6503
6504 [INDEX_op_into] = CC_O,
6505
6506 [INDEX_op_jb_subb] = CC_C,
6507 [INDEX_op_jb_subw] = CC_C,
6508 [INDEX_op_jb_subl] = CC_C,
6509
6510 [INDEX_op_jz_subb] = CC_Z,
6511 [INDEX_op_jz_subw] = CC_Z,
6512 [INDEX_op_jz_subl] = CC_Z,
6513
6514 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6515 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6516 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6517
6518 [INDEX_op_js_subb] = CC_S,
6519 [INDEX_op_js_subw] = CC_S,
6520 [INDEX_op_js_subl] = CC_S,
6521
6522 [INDEX_op_jl_subb] = CC_O | CC_S,
6523 [INDEX_op_jl_subw] = CC_O | CC_S,
6524 [INDEX_op_jl_subl] = CC_O | CC_S,
6525
6526 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6527 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6528 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6529
6530 [INDEX_op_loopnzw] = CC_Z,
6531 [INDEX_op_loopnzl] = CC_Z,
6532 [INDEX_op_loopzw] = CC_Z,
6533 [INDEX_op_loopzl] = CC_Z,
6534
6535 [INDEX_op_seto_T0_cc] = CC_O,
6536 [INDEX_op_setb_T0_cc] = CC_C,
6537 [INDEX_op_setz_T0_cc] = CC_Z,
6538 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6539 [INDEX_op_sets_T0_cc] = CC_S,
6540 [INDEX_op_setp_T0_cc] = CC_P,
6541 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6542 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6543
6544 [INDEX_op_setb_T0_subb] = CC_C,
6545 [INDEX_op_setb_T0_subw] = CC_C,
6546 [INDEX_op_setb_T0_subl] = CC_C,
6547
6548 [INDEX_op_setz_T0_subb] = CC_Z,
6549 [INDEX_op_setz_T0_subw] = CC_Z,
6550 [INDEX_op_setz_T0_subl] = CC_Z,
6551
6552 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6553 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6554 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6555
6556 [INDEX_op_sets_T0_subb] = CC_S,
6557 [INDEX_op_sets_T0_subw] = CC_S,
6558 [INDEX_op_sets_T0_subl] = CC_S,
6559
6560 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6561 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6562 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6563
6564 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6565 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6566 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6567
6568 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6569 [INDEX_op_cmc] = CC_C,
6570 [INDEX_op_salc] = CC_C,
6571
6572 /* needed for correct flag optimization before string ops */
6573 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6574 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6575 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6576 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6577
6578 #ifdef TARGET_X86_64
6579 [INDEX_op_jb_subq] = CC_C,
6580 [INDEX_op_jz_subq] = CC_Z,
6581 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6582 [INDEX_op_js_subq] = CC_S,
6583 [INDEX_op_jl_subq] = CC_O | CC_S,
6584 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6585
6586 [INDEX_op_loopnzq] = CC_Z,
6587 [INDEX_op_loopzq] = CC_Z,
6588
6589 [INDEX_op_setb_T0_subq] = CC_C,
6590 [INDEX_op_setz_T0_subq] = CC_Z,
6591 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6592 [INDEX_op_sets_T0_subq] = CC_S,
6593 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6594 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6595
6596 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6597 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6598 #endif
6599
6600 #define DEF_READF(SUFFIX)\
6601 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6602 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6603 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6604 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6605 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6606 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6607 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6608 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6609 \
6610 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6611 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6612 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6613 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6614 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6615 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6616 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6617 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6618
6619 DEF_READF( )
6620 DEF_READF(_raw)
6621 #ifndef CONFIG_USER_ONLY
6622 DEF_READF(_kernel)
6623 DEF_READF(_user)
6624 #endif
6625 };
6626
6627 /* flags written by an operation */
6628 static uint16_t opc_write_flags[NB_OPS] = {
6629 [INDEX_op_update2_cc] = CC_OSZAPC,
6630 [INDEX_op_update1_cc] = CC_OSZAPC,
6631 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6632 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6633 /* subtle: due to the incl/decl implementation, C is used */
6634 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6635 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6636
6637 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6638 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6639 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6640 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6641 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6642 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6643 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6644 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6645 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6646 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6647 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6648
6649 /* sse */
6650 [INDEX_op_com_dummy] = CC_OSZAPC,
6654
6655 /* bcd */
6656 [INDEX_op_aam] = CC_OSZAPC,
6657 [INDEX_op_aad] = CC_OSZAPC,
6658 [INDEX_op_aas] = CC_OSZAPC,
6659 [INDEX_op_aaa] = CC_OSZAPC,
6660 [INDEX_op_das] = CC_OSZAPC,
6661 [INDEX_op_daa] = CC_OSZAPC,
6662
6663 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6664 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6665 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6666 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6667 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6668 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6669 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6670 [INDEX_op_clc] = CC_C,
6671 [INDEX_op_stc] = CC_C,
6672 [INDEX_op_cmc] = CC_C,
6673
6674 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6675 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6676 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6677 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6678 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6679 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6680 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6681 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6682 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6683 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6684 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6685 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6686
6687 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6688 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6689 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6690 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6691 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6692 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6693
6694 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6695 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6696 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6697 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6698
6699 [INDEX_op_cmpxchg8b] = CC_Z,
6700 [INDEX_op_lar] = CC_Z,
6701 [INDEX_op_lsl] = CC_Z,
6702 [INDEX_op_verr] = CC_Z,
6703 [INDEX_op_verw] = CC_Z,
6704 [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6706
6707 #define DEF_WRITEF(SUFFIX)\
6708 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6709 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6710 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6711 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6712 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6713 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6714 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6715 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6716 \
6717 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6718 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6719 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6720 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6721 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6722 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6723 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6724 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6725 \
6726 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6727 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6728 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6729 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6730 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6731 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6732 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6733 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6734 \
6735 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6736 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6737 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6738 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6739 \
6740 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6741 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6742 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6743 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6744 \
6745 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6746 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6747 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6748 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6749 \
6750 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6751 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6752 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6753 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6754 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6755 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6756 \
6757 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6758 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6759 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6760 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6761 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6762 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6763 \
6764 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6765 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6766 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6767 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6768
6769
6770 DEF_WRITEF( )
6771 DEF_WRITEF(_raw)
6772 #ifndef CONFIG_USER_ONLY
6773 DEF_WRITEF(_kernel)
6774 DEF_WRITEF(_user)
6775 #endif
6776 };
6777
6778 /* simpler form of an operation if no flags need to be generated */
6779 static uint16_t opc_simpler[NB_OPS] = {
6780 [INDEX_op_update2_cc] = INDEX_op_nop,
6781 [INDEX_op_update1_cc] = INDEX_op_nop,
6782 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6783 #if 0
6784 /* broken: CC_OP logic must be rewritten */
6785 [INDEX_op_update_inc_cc] = INDEX_op_nop,
6786 #endif
6787
6788 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6789 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6790 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6791 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6792
6793 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6794 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6795 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6796 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6797
6798 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6799 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6800 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6801 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6802
6803 #define DEF_SIMPLER(SUFFIX)\
6804 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6805 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6806 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6807 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6808 \
6809 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6810 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6811 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6812 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6813
6814 DEF_SIMPLER( )
6815 DEF_SIMPLER(_raw)
6816 #ifndef CONFIG_USER_ONLY
6817 DEF_SIMPLER(_kernel)
6818 DEF_SIMPLER(_user)
6819 #endif
6820 };
6821
6822 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6823 {
6824 switch(macro_id) {
6825 #ifdef MACRO_TEST
6826 case MACRO_TEST:
6827 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6828 break;
6829 #endif
6830 }
6831 }
6832
6833 void optimize_flags_init(void)
6834 {
6835 int i;
6836 /* make every op default to itself in the opc_simpler array */
6837 for(i = 0; i < NB_OPS; i++) {
6838 if (opc_simpler[i] == 0)
6839 opc_simpler[i] = i;
6840 }
6841
6842 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6843
6844 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
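/* if a target long does not fit in a host register, T0/T1/A0 cannot
   be mapped to fixed host registers and must live in CPUState
   fields instead */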
6845 #if TARGET_LONG_BITS > HOST_LONG_BITS
6846 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6847 TCG_AREG0, offsetof(CPUState, t0), "T0");
6848 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6849 TCG_AREG0, offsetof(CPUState, t1), "T1");
6850 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6851 TCG_AREG0, offsetof(CPUState, t2), "A0");
6852 #else
6853 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6854 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6855 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6856 cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6857 #endif
6858 /* the helpers are only registered to print debug info */
6859 TCG_HELPER(helper_divl_EAX_T0);
6860 TCG_HELPER(helper_idivl_EAX_T0);
6861 }
6862
6863 /* CPU flags computation optimization: we move backward through the
6864 generated code to see which flags are needed, and replace an
6865 operation with a simpler form when the flags it computes are dead */
6866 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6867 {
6868 uint16_t *opc_ptr;
6869 int live_flags, write_flags, op;
6870
6871 opc_ptr = opc_buf + opc_buf_len;
6872 /* live_flags contains the flags needed by the next instructions
6873 in the code. At the end of the block, we consider that all the
6874 flags are live. */
6875 live_flags = CC_OSZAPC;
6876 while (opc_ptr > opc_buf) {
6877 op = *--opc_ptr;
6878 /* if none of the flags written by the instruction is used,
6879 then we can try to find a simpler instruction */
6880 write_flags = opc_write_flags[op];
6881 if ((live_flags & write_flags) == 0) {
6882 *opc_ptr = opc_simpler[op];
6883 }
6884 /* compute the live flags before the instruction */
6885 live_flags &= ~write_flags;
6886 live_flags |= opc_read_flags[op];
6887 }
6888 }
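
#if 0
/* illustrative only: a minimal use of the pass above on a hand built
   two-op buffer. update2_cc overwrites every flag that update1_cc
   produces, so the first op's flag computation is dead. */
static void optimize_flags_example(void)
{
    uint16_t buf[2] = { INDEX_op_update1_cc, INDEX_op_update2_cc };
    optimize_flags(buf, 2);
    /* buf[0] is now INDEX_op_nop, buf[1] is unchanged */
}
#endif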
6889
6890 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6891 basic block 'tb'. If search_pc is TRUE, also generate PC
6892 information for each intermediate instruction. */
6893 static inline int gen_intermediate_code_internal(CPUState *env,
6894 TranslationBlock *tb,
6895 int search_pc)
6896 {
6897 DisasContext dc1, *dc = &dc1;
6898 target_ulong pc_ptr;
6899 uint16_t *gen_opc_end;
6900 int j, lj, cflags;
6901 uint64_t flags;
6902 target_ulong pc_start;
6903 target_ulong cs_base;
6904
6905 /* generate intermediate code */
6906 pc_start = tb->pc;
6907 cs_base = tb->cs_base;
6908 flags = tb->flags;
6909 cflags = tb->cflags;
6910
6911 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6912 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6913 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6914 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6915 dc->f_st = 0;
6916 dc->vm86 = (flags >> VM_SHIFT) & 1;
6917 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6918 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6919 dc->tf = (flags >> TF_SHIFT) & 1;
6920 dc->singlestep_enabled = env->singlestep_enabled;
6921 dc->cc_op = CC_OP_DYNAMIC;
6922 dc->cs_base = cs_base;
6923 dc->tb = tb;
6924 dc->popl_esp_hack = 0;
6925 /* select memory access functions */
6926 dc->mem_index = 0;
6927 if (flags & HF_SOFTMMU_MASK) {
6928 if (dc->cpl == 3)
6929 dc->mem_index = 2 * 4;
6930 else
6931 dc->mem_index = 1 * 4;
6932 }
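/* mem_index selects among the _raw (user mode emulation), _kernel and
   _user flavours of the load/store micro ops */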
6933 dc->cpuid_features = env->cpuid_features;
6934 dc->cpuid_ext_features = env->cpuid_ext_features;
6935 dc->cpuid_ext2_features = env->cpuid_ext2_features;
6936 #ifdef TARGET_X86_64
6937 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6938 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6939 #endif
6940 dc->flags = flags;
6941 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6942 (flags & HF_INHIBIT_IRQ_MASK)
6943 #ifndef CONFIG_SOFTMMU
6944 || (flags & HF_SOFTMMU_MASK)
6945 #endif
6946 );
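/* direct jumps may only be chained when nothing forces a return to
   the cpu main loop after every block (single stepping, inhibited
   irqs) */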
6947 #if 0
6948 /* check addseg logic */
6949 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6950 printf("ERROR addseg\n");
6951 #endif
6952
6953 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6954 #if TARGET_LONG_BITS > HOST_LONG_BITS
6955 cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
6956 #endif
6957 cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
6958 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6959 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6960
6961 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6962
6963 dc->is_jmp = DISAS_NEXT;
6964 pc_ptr = pc_start;
6965 lj = -1;
6966
6967 for(;;) {
6968 if (env->nb_breakpoints > 0) {
6969 for(j = 0; j < env->nb_breakpoints; j++) {
6970 if (env->breakpoints[j] == pc_ptr) {
6971 gen_debug(dc, pc_ptr - dc->cs_base);
6972 break;
6973 }
6974 }
6975 }
6976 if (search_pc) {
6977 j = gen_opc_ptr - gen_opc_buf;
6978 if (lj < j) {
6979 lj++;
6980 while (lj < j)
6981 gen_opc_instr_start[lj++] = 0;
6982 }
6983 gen_opc_pc[lj] = pc_ptr;
6984 gen_opc_cc_op[lj] = dc->cc_op;
6985 gen_opc_instr_start[lj] = 1;
6986 }
6987 pc_ptr = disas_insn(dc, pc_ptr);
6988 /* stop translation if indicated */
6989 if (dc->is_jmp)
6990 break;
6991 /* in single step mode, we generate only one instruction and
6992 then generate an exception */
6993 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6994 the flag and abort the translation to give the irqs a
6995 chance to happen */
6996 if (dc->tf || dc->singlestep_enabled ||
6997 (flags & HF_INHIBIT_IRQ_MASK) ||
6998 (cflags & CF_SINGLE_INSN)) {
6999 gen_jmp_im(pc_ptr - dc->cs_base);
7000 gen_eob(dc);
7001 break;
7002 }
7003 /* if the translation is getting too long, stop generation as well */
7004 if (gen_opc_ptr >= gen_opc_end ||
7005 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7006 gen_jmp_im(pc_ptr - dc->cs_base);
7007 gen_eob(dc);
7008 break;
7009 }
7010 }
7011 *gen_opc_ptr = INDEX_op_end;
7012 /* do not forget to fill in the last values */
7013 if (search_pc) {
7014 j = gen_opc_ptr - gen_opc_buf;
7015 lj++;
7016 while (lj <= j)
7017 gen_opc_instr_start[lj++] = 0;
7018 }
7019
7020 #ifdef DEBUG_DISAS
7021 if (loglevel & CPU_LOG_TB_CPU) {
7022 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7023 }
7024 if (loglevel & CPU_LOG_TB_IN_ASM) {
7025 int disas_flags;
7026 fprintf(logfile, "----------------\n");
7027 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7028 #ifdef TARGET_X86_64
7029 if (dc->code64)
7030 disas_flags = 2;
7031 else
7032 #endif
7033 disas_flags = !dc->code32;
7034 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7035 fprintf(logfile, "\n");
7036 if (loglevel & CPU_LOG_TB_OP_OPT) {
7037 fprintf(logfile, "OP before opt:\n");
7038 tcg_dump_ops(&tcg_ctx, logfile);
7039 fprintf(logfile, "\n");
7040 }
7041 }
7042 #endif
7043
7044 /* optimize flag computations */
7045 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
7046
7047 if (!search_pc)
7048 tb->size = pc_ptr - pc_start;
7049 return 0;
7050 }
7051
7052 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7053 {
7054 return gen_intermediate_code_internal(env, tb, 0);
7055 }
7056
7057 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7058 {
7059 return gen_intermediate_code_internal(env, tb, 1);
7060 }
7061
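/* restore the cpu state for the instruction starting at searched_pc
   after a fault inside a TB: eip is always recovered from the
   gen_opc_pc table, cc_op only when it was statically known at
   translation time */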
7062 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7063 unsigned long searched_pc, int pc_pos, void *puc)
7064 {
7065 int cc_op;
7066 #ifdef DEBUG_DISAS
7067 if (loglevel & CPU_LOG_TB_OP) {
7068 int i;
7069 fprintf(logfile, "RESTORE:\n");
7070 for(i = 0;i <= pc_pos; i++) {
7071 if (gen_opc_instr_start[i]) {
7072 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7073 }
7074 }
7075 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7076 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7077 (uint32_t)tb->cs_base);
7078 }
7079 #endif
7080 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7081 cc_op = gen_opc_cc_op[pc_pos];
7082 if (cc_op != CC_OP_DYNAMIC)
7083 env->cc_op = cc_op;
7084 }