]> git.proxmox.com Git - mirror_qemu.git/blob - target-i386/translate.c
393db0d65e369ca049d207d938c674506fe5dc98
[mirror_qemu.git] / target-i386 / translate.c
1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31
/* XXX: move that elsewhere */
/* Output cursors for the generated micro-op stream: opcode indices and
   their parameter words are appended through these pointers. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

/* Instruction prefix bits, ORed together into DisasContext.prefix
   while decoding one instruction. */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
/* On the 64-bit target these expand to their argument; on the 32-bit
   target they expand to NULL / nothing so that shared tables can hold
   64-bit-only entries. */
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
/* table slots for 64-bit ops that are currently disabled */
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
/* Non-zero while translating an insn that carried a REX prefix: byte
   register encodings 4..7 then select SPL/BPL/SIL/DIL rather than
   AH/CH/DH/BH (see the DEF_BREGS wrappers below, which test this). */
static int x86_64_hregs;
#endif

#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif
69
/* Per-instruction and per-block state of the x86 translator. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address/operand size: 0=16 bit, 1=32 bit;
                         aflag==2 means 64 bit addressing (x86-64) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B extension bits for modrm decoding */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_*), tracked lazily */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (offset into the
                      raw/kernel/user banks of the ld/st tables) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* guest CPUID feature bits */
    int cpuid_ext_features; /* guest CPUID extended feature bits */
} DisasContext;

/* forward declarations into the translator core */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109
/* i386 arith/logic operations (the 8 ops of the 0x80..0x83 group,
   in opcode /reg order) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops (grp2 /reg order) */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* micro-op opcode indices, generated from opc.h */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};

#include "gen-op.h"

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
166
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

/* Expand to one table entry per integer register, in encoding order
   (16 registers on x86-64, 8 otherwise). */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

/* Byte register encodings 4..7 are ambiguous on x86-64: with a REX
   prefix they are SPL/BPL/SIL/DIL, without one they are the legacy
   high-byte regs AH/CH/DH/BH.  These wrappers dispatch at runtime on
   x86_64_hregs between the low-byte op (prefixb) of ESP/EBP/ESI/EDI
   and the high-byte op (prefixh) of EAX/ECX/EDX/EBX. */
#define DEF_BREGS(prefixb, prefixh, suffix)             \
                                                        \
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                 \
        prefixb ## ESP ## suffix ();                    \
    else                                                \
        prefixh ## EAX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                 \
        prefixb ## EBP ## suffix ();                    \
    else                                                \
        prefixh ## ECX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                 \
        prefixb ## ESI ## suffix ();                    \
    else                                                \
        prefixh ## EDX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                 \
        prefixb ## EDI ## suffix ();                    \
    else                                                \
        prefixh ## EBX ## suffix ();                    \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
243
/* Store T0 into a register, indexed by [operand size][reg].  In the
   OT_BYTE row, regs 4..7 go through the DEF_BREGS wrappers on x86-64
   (REX vs. high-byte naming); on 32-bit they are directly the
   high-byte stores AH/CH/DH/BH. */
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T0,
        gen_op_movb_ECX_T0,
        gen_op_movb_EDX_T0,
        gen_op_movb_EBX_T0,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T0_wrapper,
        gen_op_movb_EBP_T0_wrapper,
        gen_op_movb_ESI_T0_wrapper,
        gen_op_movb_EDI_T0_wrapper,
        gen_op_movb_R8_T0,
        gen_op_movb_R9_T0,
        gen_op_movb_R10_T0,
        gen_op_movb_R11_T0,
        gen_op_movb_R12_T0,
        gen_op_movb_R13_T0,
        gen_op_movb_R14_T0,
        gen_op_movb_R15_T0,
#else
        gen_op_movh_EAX_T0,
        gen_op_movh_ECX_T0,
        gen_op_movh_EDX_T0,
        gen_op_movh_EBX_T0,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T0)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T0)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T0)
    },
#endif
};

/* Same as above but stores T1. */
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T1,
        gen_op_movb_ECX_T1,
        gen_op_movb_EDX_T1,
        gen_op_movb_EBX_T1,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T1_wrapper,
        gen_op_movb_EBP_T1_wrapper,
        gen_op_movb_ESI_T1_wrapper,
        gen_op_movb_EDI_T1_wrapper,
        gen_op_movb_R8_T1,
        gen_op_movb_R9_T1,
        gen_op_movb_R10_T1,
        gen_op_movb_R11_T1,
        gen_op_movb_R12_T1,
        gen_op_movb_R13_T1,
        gen_op_movb_R14_T1,
        gen_op_movb_R15_T1,
#else
        gen_op_movh_EAX_T1,
        gen_op_movh_ECX_T1,
        gen_op_movh_EDX_T1,
        gen_op_movh_EBX_T1,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T1)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T1)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T1)
    },
#endif
};

/* Store A0 into a register; indexed by [size - OT_WORD][reg] — there is
   no byte row, so index 0=word, 1=long, 2=quad. */
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_movw_, _A0)
    },
    [1] = {
        DEF_REGS(gen_op_movl_, _A0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_movq_, _A0)
    },
#endif
};
335
/* Load a register into T0 or T1, indexed by [operand size][0=T0,1=T1][reg].
   Note that the word/long/quad rows all reuse the same gen_op_movl_T*_
   full-register loads: only byte reads need distinct ops (and on x86-64
   regs 4..7 use the DEF_BREGS wrappers for REX vs. high-byte naming). */
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
{
    [OT_BYTE] = {
        {
            gen_op_movl_T0_EAX,
            gen_op_movl_T0_ECX,
            gen_op_movl_T0_EDX,
            gen_op_movl_T0_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T0_ESP_wrapper,
            gen_op_movl_T0_EBP_wrapper,
            gen_op_movl_T0_ESI_wrapper,
            gen_op_movl_T0_EDI_wrapper,
            gen_op_movl_T0_R8,
            gen_op_movl_T0_R9,
            gen_op_movl_T0_R10,
            gen_op_movl_T0_R11,
            gen_op_movl_T0_R12,
            gen_op_movl_T0_R13,
            gen_op_movl_T0_R14,
            gen_op_movl_T0_R15,
#else
            gen_op_movh_T0_EAX,
            gen_op_movh_T0_ECX,
            gen_op_movh_T0_EDX,
            gen_op_movh_T0_EBX,
#endif
        },
        {
            gen_op_movl_T1_EAX,
            gen_op_movl_T1_ECX,
            gen_op_movl_T1_EDX,
            gen_op_movl_T1_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T1_ESP_wrapper,
            gen_op_movl_T1_EBP_wrapper,
            gen_op_movl_T1_ESI_wrapper,
            gen_op_movl_T1_EDI_wrapper,
            gen_op_movl_T1_R8,
            gen_op_movl_T1_R9,
            gen_op_movl_T1_R10,
            gen_op_movl_T1_R11,
            gen_op_movl_T1_R12,
            gen_op_movl_T1_R13,
            gen_op_movl_T1_R14,
            gen_op_movl_T1_R15,
#else
            gen_op_movh_T1_EAX,
            gen_op_movh_T1_ECX,
            gen_op_movh_T1_EDX,
            gen_op_movh_T1_EBX,
#endif
        },
    },
    [OT_WORD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
    [OT_LONG] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#endif
};

/* Load a register into A0 (32-bit address computation). */
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movl_A0_, )
};

/* A0 += reg << shift, indexed by [shift 0..3][reg] — used for SIB
   scaled-index address computation (32-bit). */
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addl_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addl_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addl_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addl_A0_, _s3)
    },
};

#ifdef TARGET_X86_64
/* 64-bit variants of the two address-computation tables above. */
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movq_A0_, )
};

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addq_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addq_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addq_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addq_A0_, _s3)
    },
};
#endif

/* Conditional move of T1 into a register under T0; indexed by
   [size - OT_WORD][reg] (no byte row: cmov has no byte form). */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
471
/* Arith ops indexed by OP_*; only the pure logic ops (or/and/xor) are
   dispatched through this table — add/adc/sbb/sub/cmp are handled by
   dedicated paths in gen_op(), so their slots stay NULL. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};

/* adc/sbb rows for one memory-access variant: indexed by
   [operand size][0=adc,1=sbb]; quad entries exist only on x86-64. */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

/* register destination variant */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* memory destination variant: three banks of 4 sizes, indexed by
   ot + mem_index (0=raw, 4=kernel, 8=user). */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* CC_OP_* (byte variant) produced by each OP_* arith op, in OP_ADDL..
   OP_CMPL order; add `ot` to obtain the sized variant. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

/* cmpxchg ops for one memory-access variant, indexed by operand size. */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

/* memory variant: banks of 4, indexed by ot + mem_index as above */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
541
/* Shift/rotate rows for one memory-access variant, indexed by
   [operand size][OP_ROL..OP_SAR].  The undocumented OP_SHL1 slot
   (index 6) deliberately reuses the plain shl op.  Quad row is
   x86-64 only. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

/* register destination variant */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

/* memory destination variant, indexed by ot + mem_index */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};

/* shld/shrd (double-precision shift) rows: indexed by
   [operand size][0=shld,1=shrd]; no byte form exists, so the first
   row is NULL.  `op` selects the count source (immediate or ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
637
/* Bit test ops bt/bts/btr/btc, indexed by [size: 0=word,1=long,2=quad]
   [op] — no byte form. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* A0 += byte offset of the bit index in T1 (memory bit-test forms). */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* bsf/bsr, indexed by [size: 0=word,1=long,2=quad][0=bsf,1=bsr]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
683
/* Memory access dispatch tables.  All are laid out as three banks of 4
   operand sizes and indexed with ot + mem_index, where mem_index picks
   the raw (0), kernel (4) or user (8) access functions. */

/* sign-extending loads into T0; the 32-bit entry only exists on x86-64
   (elsewhere a 32-bit load needs no extension) */
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
    gen_op_ldsb_raw_T0_A0,
    gen_op_ldsw_raw_T0_A0,
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
    NULL,
#ifndef CONFIG_USER_ONLY
    gen_op_ldsb_kernel_T0_A0,
    gen_op_ldsw_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
    NULL,

    gen_op_ldsb_user_T0_A0,
    gen_op_ldsw_user_T0_A0,
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
    NULL,
#endif
};

/* zero-extending loads into T0 (byte/word only) */
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    NULL,
    NULL,

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    NULL,
    NULL,

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    NULL,
    NULL,
#endif
};

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    gen_op_ldl_raw_T0_A0,
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    gen_op_ldl_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    gen_op_ldl_user_T0_A0,
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
#endif
};

/* general loads into T1 */
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
    gen_op_ldub_raw_T1_A0,
    gen_op_lduw_raw_T1_A0,
    gen_op_ldl_raw_T1_A0,
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T1_A0,
    gen_op_lduw_kernel_T1_A0,
    gen_op_ldl_kernel_T1_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),

    gen_op_ldub_user_T1_A0,
    gen_op_lduw_user_T1_A0,
    gen_op_ldl_user_T1_A0,
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
#endif
};

/* stores from T0 */
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
    gen_op_stb_raw_T0_A0,
    gen_op_stw_raw_T0_A0,
    gen_op_stl_raw_T0_A0,
    X86_64_ONLY(gen_op_stq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_stb_kernel_T0_A0,
    gen_op_stw_kernel_T0_A0,
    gen_op_stl_kernel_T0_A0,
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),

    gen_op_stb_user_T0_A0,
    gen_op_stw_user_T0_A0,
    gen_op_stl_user_T0_A0,
    X86_64_ONLY(gen_op_stq_user_T0_A0),
#endif
};

/* stores from T1 (no byte form is ever needed, hence the NULL slots) */
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
    NULL,
    gen_op_stw_raw_T1_A0,
    gen_op_stl_raw_T1_A0,
    X86_64_ONLY(gen_op_stq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    NULL,
    gen_op_stw_kernel_T1_A0,
    gen_op_stl_kernel_T1_A0,
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),

    NULL,
    gen_op_stw_user_T1_A0,
    gen_op_stl_user_T1_A0,
    X86_64_ONLY(gen_op_stq_user_T1_A0),
#endif
};
797
/* Emit code setting EIP/RIP to the immediate `pc`, picking on x86-64
   the narrowest op that can encode the value (zero-extended 32-bit,
   sign-extended 32-bit, or full 64-bit split into two halves). */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        gen_op_movq_eip_im(pc);
    } else {
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
812
/* Emit code computing in A0 the address of the string-op source,
   i.e. segment base (override or DS) + ESI, honouring the current
   address size: aflag==2 is 64-bit (base only when overridden),
   aflag==1 is 32-bit (base added only when addseg or an override
   requires it), aflag==0 is 16-bit (SI masked to 16 bits, base
   always added). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
847
/* Emit code computing in A0 the address of the string-op destination:
   always ES:EDI (the destination segment of string insns cannot be
   overridden), with the same address-size handling as the ESI variant. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
868
/* Load into T0 the per-element ESI/EDI increment for string ops
   (sized by operand type; presumably negated when the direction flag
   is set — implemented in the generated op). */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional jumps on the count register, indexed by aflag
   (0=CX, 1=ECX, 2=RCX). */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* Decrement the count register, indexed by aflag. */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPZ/REPNZ termination tests after cmps/scas: jump when the ZF
   implied by the last sub is (nz=0) clear or (nz=1) set; indexed by
   [nz][operand size]. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* I/O port access ops, indexed by operand size (byte/word/long). */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* I/O permission checks against the TSS bitmap, port in T0 or DX. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
944
945 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
946 {
947 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
948 if (s->cc_op != CC_OP_DYNAMIC)
949 gen_op_set_cc_op(s->cc_op);
950 gen_jmp_im(cur_eip);
951 if (use_dx)
952 gen_check_io_DX[ot]();
953 else
954 gen_check_io_T0[ot]();
955 }
956 }
957
/* Emit one MOVS iteration: load from [seg:ESI], store to ES:[EDI],
   then advance ESI and EDI by the direction-dependent element size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
979
980 static inline void gen_update_cc_op(DisasContext *s)
981 {
982 if (s->cc_op != CC_OP_DYNAMIC) {
983 gen_op_set_cc_op(s->cc_op);
984 s->cc_op = CC_OP_DYNAMIC;
985 }
986 }
987
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the REP loop exit test: if the count register is zero, fall
   through to a jump to next_eip (the insn after the string op).
   Returns label l2, placed just before that exit jump, so the REP
   expansion can branch back here to terminate the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1); /* count != 0: skip the exit jump */
    gen_set_label(l2);            /* re-entry point for loop termination */
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1002
/* Emit one STOS iteration: store EAX/AX/AL to ES:[EDI], then advance
   EDI by the direction-dependent element size. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1020
/* Emit one LODS iteration: load from [seg:ESI] into EAX/AX/AL, then
   advance ESI by the direction-dependent element size. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1038
/* Emit one SCAS iteration: compare EAX/AX/AL against ES:[EDI]
   (flags only), then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1057
/* Emit one CMPS iteration: compare [seg:ESI] against ES:[EDI]
   (flags only), then advance both ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1080
/* Emit one INS iteration: read from port DX, store to ES:[EDI], then
   advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* store a dummy zero first so that any page fault on the
       destination is taken before the I/O read is performed */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1100
/* Emit one OUTS iteration: load from [seg:ESI], write to port DX,
   then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1118
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand a REP-prefixed string insn as: test count / exit, one
   iteration, decrement count, then jump back to the current insn so
   the loop re-enters through the translator (keeping interrupts and
   single-stepping correct). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* Same, for REPZ/REPNZ-prefixed cmps/scas: additionally exit the loop
   through l2 when the ZF test (selected by nz) fails. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* instantiate the REP helpers for each string op */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1162
/* condition codes for jcc/setcc (the Jcc opcode's low bits >> 1;
   the lowest bit selects the negated form) */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* Fast conditional jumps usable right after a sized sub/cmp, indexed
   by [operand size][JCC_*].  NULL slots (JCC_O, JCC_P) cannot be
   derived from the sub result alone; some quad entries are disabled
   via BUGGY_64. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};

/* loopnz/loopz/loop (jecxz uses slot 2 as well), indexed by
   [aflag][type]; the fourth slot of each row is unused. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

/* generic setcc using the fully materialised flags, indexed by JCC_* */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1248
/* Fast setcc usable right after a sized sub/cmp, indexed like
   gen_jcc_sub; NULL where the condition cannot be derived from the
   sub result (JCC_O, JCC_P). */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

/* FPU ops ST0 <- ST0 op FT0, indexed by the insn's /reg field;
   slots 2 and 3 (fcom/fcomp) both point at the compare op. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1316
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one arith/logic op (OP_*): destination is register d or the
   memory operand at A0; the second operand is expected in T1.  Also
   updates the lazily-tracked condition-code state in s1->cc_op. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* adc/sbb consume the current carry, so the flags must be
           materialised first; afterwards cc_op becomes dynamic */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* cmp computes flags only: no write-back, no cc update op */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1376
/* Generate an INC (c > 0) or DEC (c <= 0) of size 'ot' on register
   'd', or, if d == OR_TMP0, on the memory operand addressed by A0. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    /* flush pending flags first — INC/DEC do not modify CF, so the
       CC_OP_INC/DEC handlers presumably need the previous flags
       state available (see gen_op_update_inc_cc) */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1399
/* Generate shift/rotate 'op' of size 'ot' on register 'd' (or the
   memory operand at A0 if d == OR_TMP0), with the count taken from
   register 's' (or from T1 directly when s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1420
/* Shift/rotate with an immediate count 'c': loads the count into T1
   and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1427
/* Decode a modrm memory operand and generate code that leaves the
   effective address (including any applicable segment base) in A0.
   Consumes SIB/displacement bytes from the code stream.  On return
   *reg_ptr is OR_A0 and *offset_ptr is 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment override prefix always forces the add */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* absolute displacement, no base register */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod=0, rm=6: disp16 absolute */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16 bit address wrap-around */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1617
1618 static void gen_nop_modrm(DisasContext *s, int modrm)
1619 {
1620 int mod, rm, base, code;
1621
1622 mod = (modrm >> 6) & 3;
1623 if (mod == 3)
1624 return;
1625 rm = modrm & 7;
1626
1627 if (s->aflag) {
1628
1629 base = rm;
1630
1631 if (base == 4) {
1632 code = ldub_code(s->pc++);
1633 base = (code & 7);
1634 }
1635
1636 switch (mod) {
1637 case 0:
1638 if (base == 5) {
1639 s->pc += 4;
1640 }
1641 break;
1642 case 1:
1643 s->pc++;
1644 break;
1645 default:
1646 case 2:
1647 s->pc += 4;
1648 break;
1649 }
1650 } else {
1651 switch (mod) {
1652 case 0:
1653 if (rm == 6) {
1654 s->pc += 2;
1655 }
1656 break;
1657 case 1:
1658 s->pc++;
1659 break;
1660 default:
1661 case 2:
1662 s->pc += 2;
1663 break;
1664 }
1665 }
1666 }
1667
1668 /* used for LEA and MOV AX, mem */
1669 static void gen_add_A0_ds_seg(DisasContext *s)
1670 {
1671 int override, must_add_seg;
1672 must_add_seg = s->addseg;
1673 override = R_DS;
1674 if (s->override >= 0) {
1675 override = s->override;
1676 must_add_seg = 1;
1677 } else {
1678 override = R_DS;
1679 }
1680 if (must_add_seg) {
1681 #ifdef TARGET_X86_64
1682 if (CODE64(s)) {
1683 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1684 } else
1685 #endif
1686 {
1687 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1688 }
1689 }
1690 }
1691
1692 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1693 OR_TMP0 */
1694 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1695 {
1696 int mod, rm, opreg, disp;
1697
1698 mod = (modrm >> 6) & 3;
1699 rm = (modrm & 7) | REX_B(s);
1700 if (mod == 3) {
1701 if (is_store) {
1702 if (reg != OR_TMP0)
1703 gen_op_mov_TN_reg[ot][0][reg]();
1704 gen_op_mov_reg_T0[ot][rm]();
1705 } else {
1706 gen_op_mov_TN_reg[ot][0][rm]();
1707 if (reg != OR_TMP0)
1708 gen_op_mov_reg_T0[ot][reg]();
1709 }
1710 } else {
1711 gen_lea_modrm(s, modrm, &opreg, &disp);
1712 if (is_store) {
1713 if (reg != OR_TMP0)
1714 gen_op_mov_TN_reg[ot][0][reg]();
1715 gen_op_st_T0_A0[ot + s->mem_index]();
1716 } else {
1717 gen_op_ld_T0_A0[ot + s->mem_index]();
1718 if (reg != OR_TMP0)
1719 gen_op_mov_reg_T0[ot][reg]();
1720 }
1721 }
1722 }
1723
1724 static inline uint32_t insn_get(DisasContext *s, int ot)
1725 {
1726 uint32_t ret;
1727
1728 switch(ot) {
1729 case OT_BYTE:
1730 ret = ldub_code(s->pc);
1731 s->pc++;
1732 break;
1733 case OT_WORD:
1734 ret = lduw_code(s->pc);
1735 s->pc += 2;
1736 break;
1737 default:
1738 case OT_LONG:
1739 ret = ldl_code(s->pc);
1740 s->pc += 4;
1741 break;
1742 }
1743 return ret;
1744 }
1745
1746 static inline int insn_const_size(unsigned int ot)
1747 {
1748 if (ot <= OT_LONG)
1749 return 1 << ot;
1750 else
1751 return 4;
1752 }
1753
/* Emit a jump to 'eip'.  If the target lies on the same physical
   page as the current TB (or on the page of the current insn), a
   direct TB link (slot 'tb_num') is used; otherwise the TB exits
   normally and the target is looked up by the main loop. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return value encodes the TB pointer plus the link slot */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1778
/* Generate a conditional jump.  'b' is the Jcc condition encoding
   (bit 0 inverts, bits 1..3 select the test); 'val' is the taken
   target, 'next_eip' the fall-through.  When jmp_opt is set, both
   outcomes use direct TB chaining; otherwise a slow indirect jump
   sequence is emitted. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only ZF and SF tests can reuse the
               cached result directly; (cc_op - CC_OP_ADDB) % 4
               recovers the operand size */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* fall back to the slow generic condition evaluation */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* an odd condition code inverts the test: swap targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: evaluate the condition and branch indirectly */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1907
/* Generate a SETcc: leave 0/1 in T0 according to condition 'b'
   (same encoding as gen_jcc: bit 0 inverts, bits 1..3 select). */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF can be taken from the cached result;
           (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: compute flags, then evaluate the condition */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1974
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must
           be in sync before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple selector load, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1997
1998 static inline void gen_stack_update(DisasContext *s, int addend)
1999 {
2000 #ifdef TARGET_X86_64
2001 if (CODE64(s)) {
2002 if (addend == 8)
2003 gen_op_addq_ESP_8();
2004 else
2005 gen_op_addq_ESP_im(addend);
2006 } else
2007 #endif
2008 if (s->ss32) {
2009 if (addend == 2)
2010 gen_op_addl_ESP_2();
2011 else if (addend == 4)
2012 gen_op_addl_ESP_4();
2013 else
2014 gen_op_addl_ESP_im(addend);
2015 } else {
2016 if (addend == 2)
2017 gen_op_addw_ESP_2();
2018 else if (addend == 4)
2019 gen_op_addw_ESP_4();
2020 else
2021 gen_op_addw_ESP_im(addend);
2022 }
2023 }
2024
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            /* 64 bit operand size (default in long mode) */
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            /* 16 bit operand size */
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        /* T1 keeps the unsegmented offset so ESP can be written back
           after the (possibly segmented) store address is built */
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* the store is done before ESP is updated (precise exceptions) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2064
2065 /* generate a push. It depends on ss32, addseg and dflag */
2066 /* slower version for T1, only used for call Ev */
2067 static void gen_push_T1(DisasContext *s)
2068 {
2069 #ifdef TARGET_X86_64
2070 if (CODE64(s)) {
2071 gen_op_movq_A0_reg[R_ESP]();
2072 if (s->dflag) {
2073 gen_op_subq_A0_8();
2074 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2075 } else {
2076 gen_op_subq_A0_2();
2077 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2078 }
2079 gen_op_movq_ESP_A0();
2080 } else
2081 #endif
2082 {
2083 gen_op_movl_A0_reg[R_ESP]();
2084 if (!s->dflag)
2085 gen_op_subl_A0_2();
2086 else
2087 gen_op_subl_A0_4();
2088 if (s->ss32) {
2089 if (s->addseg) {
2090 gen_op_addl_A0_SS();
2091 }
2092 } else {
2093 gen_op_andl_A0_ffff();
2094 gen_op_addl_A0_SS();
2095 }
2096 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2097
2098 if (s->ss32 && !s->addseg)
2099 gen_op_movl_ESP_A0();
2100 else
2101 gen_stack_update(s, (-2) << s->dflag);
2102 }
2103 }
2104
2105 /* two step pop is necessary for precise exceptions */
2106 static void gen_pop_T0(DisasContext *s)
2107 {
2108 #ifdef TARGET_X86_64
2109 if (CODE64(s)) {
2110 gen_op_movq_A0_reg[R_ESP]();
2111 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2112 } else
2113 #endif
2114 {
2115 gen_op_movl_A0_reg[R_ESP]();
2116 if (s->ss32) {
2117 if (s->addseg)
2118 gen_op_addl_A0_SS();
2119 } else {
2120 gen_op_andl_A0_ffff();
2121 gen_op_addl_A0_SS();
2122 }
2123 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2124 }
2125 }
2126
2127 static void gen_pop_update(DisasContext *s)
2128 {
2129 #ifdef TARGET_X86_64
2130 if (CODE64(s) && s->dflag) {
2131 gen_stack_update(s, 8);
2132 } else
2133 #endif
2134 {
2135 gen_stack_update(s, 2 << s->dflag);
2136 }
2137 }
2138
/* Compute the current stack-top address in A0 (adding the SS base
   when addseg is set) and keep the raw offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2148
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA: push the 8 general registers (EAX..EDI, pre-decremented
   ESP value included), then write the new ESP from T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* reserve space for the 8 registers */
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* store in reverse register order: EDI first, EAX last/topmost */
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2167
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA: pop the 8 general registers (the saved ESP slot is skipped),
   then set ESP past the popped block. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* T1 = final ESP value, past the 8 slots */
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2189
/* ENTER: allocate a stack frame of 'esp_addend' bytes with nesting
   'level' (0..31).  Pushes EBP, optionally copies 'level' frame
   pointers via a helper, then sets EBP and adjusts ESP. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* helper copies the nested frame pointers */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2237
/* Raise exception 'trapno' at 'cur_eip': flags and EIP are
   synchronized first so the exception state is precise, then the
   translation block is terminated. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2246
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno'; the instruction length
   (next_eip - cur_eip) is passed so the return EIP can be set. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2258
/* Stop execution at 'cur_eip' and enter the debugger: sync flags
   and EIP, then emit the debug trap and end the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2267
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    /* flush lazily-computed flags before leaving the TB */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* re-enable interrupts that were inhibited (e.g. after MOV SS) */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single-step trap */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* normal exit: T0 = 0 means no TB chaining */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2287
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* direct TB chaining through link slot 'tb_num' */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* slow path: set EIP and exit the block */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2304
/* Jump to 'eip' using TB link slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2309
/* Load the target_ulong immediate 'val' into T0, using the 64 bit
   move only when the value does not fit in a sign-extended 32 bits. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2322
/* Same as gen_movtl_T0_im but targets T1. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2335
/* Add the immediate 'val' to A0 with the pointer width of the
   current code mode (64 bit in long mode, else 32 bit). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2345
/* 64 bit load from [A0] into an env field, indexed by memory access
   mode (raw / kernel / user), i.e. s->mem_index >> 2. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2353
/* 64 bit store of an env field to [A0], indexed like gen_ldq_env_A0. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2361
/* 128 bit (octa) load from [A0] into an env field, indexed like
   gen_ldq_env_A0. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2369
/* 128 bit (octa) store of an env field to [A0], indexed like
   gen_ldq_env_A0. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2377
/* sentinel for table entries that are decoded by hand in gen_sse() */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* table entry pair: { MMX op, XMM op } */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* table entry quad: { ps, pd, ss, sd } variants of an SSE FP op */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2383
/* Dispatch table for the 0x0F SSE/MMX opcode space, indexed by the
   second opcode byte and the mandatory prefix: [0] none, [1] 0x66,
   [2] 0xF3, [3] 0xF2 (see the b1 computation in gen_sse).
   SSE_SPECIAL entries are handled out of line in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2506
/* Immediate-count shift group (opcodes 0F 71/72/73): indexed by
   8 * size-group + modrm reg field (word / dword / qword rows);
   each entry is a { mmx, xmm } pair.  psrldq/pslldq exist only in
   the xmm column. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2519
/* Scalar int<->float conversions, three groups of four (cvtsi2*,
   cvtt*2si, cvt*2si); within each group: ss, sd, then the 64 bit
   sq variants which are only available on x86-64 builds. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2536
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 value
   (0..7); each row is the { ps, pd, ss, sd } quad. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2547
/* Decode and translate one MMX/SSE/SSE2/SSE3 instruction.
   'b' is the opcode byte (already past the 0x0f escape), 'pc_start'
   the guest address of the instruction (used to report exceptions),
   and 'rex_r' the REX.R contribution (0 or 8) to the ModRM reg field.
   Emits micro-ops into the current translation block; on an invalid
   encoding it raises #UD and returns. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 is the prefix column of sse_op_table1:
       0 = no prefix, 1 = 0x66 (pd), 2 = 0xf3 (ss), 3 = 0xf2 (sd). */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* Decide MMX vs XMM register file: opcodes <= 0x5f plus the
       compare/shuffle opcodes are always XMM; otherwise only a
       mandatory prefix selects the XMM form. */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise #NM for lazy FPU context switching. */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        /* also the common target of all 'goto illegal_op' above/below */
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR; MMX does not. */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;          /* REX.R only extends XMM regs, not MMX */
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* Irregular instructions: fold the prefix column into the
           opcode so each prefixed variant gets its own case label. */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* REX.W form: movq mm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* REX.W form: movq xmm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            /* Full 128-bit register load/move.  NOTE(review): the
               aligned forms do not check alignment here, so #GP on a
               misaligned movaps/movdqa is not raised. */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register form only writes the low dword */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* duplicate the low qword into the high qword */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high qword of the destination is zeroed in both forms */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            /* full 128-bit store / reverse-direction register move */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store form only; register form is not encodable */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            /* store form only; register form is not encodable */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* The immediate shift count is staged in {xmm,mmx}_t0 so
               the same 2-operand shift helpers can be reused. */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            /* row = (opcode 0x71..0x73) * 8 + modrm /r group */
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* source is an MMX reg/memory qword: touch the MMX state */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            /* index into the cvtsi2* group of sse_op_table3 */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            /* destination is an MMX register: touch the MMX state */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd variant: 64-bit source */
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    /* ss variant: 32-bit source */
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, +(b&1)*4 promotes to
               the non-truncating cvt*2si group for 0x2d opcodes */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;   /* imm8 follows the modrm bytes */
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;        /* 8 word lanes in an XMM reg */
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;        /* 4 word lanes in an MMX reg */
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            /* destination is a general-purpose register */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                /* register destination has its high qword zeroed */
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            /* destination is a general-purpose register */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these take a trailing imm8: account for it when
               computing rip-relative addresses */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            /* table entry is really a 3-operand op taking the imm8 */
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd set EFLAGS */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3145
3146
3147 /* convert one instruction. s->is_jmp is set if the translation must
3148 be stopped. Return the next pc value */
3149 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3150 {
3151 int b, prefixes, aflag, dflag;
3152 int shift, ot;
3153 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3154 target_ulong next_eip, tval;
3155 int rex_w, rex_r;
3156
3157 s->pc = pc_start;
3158 prefixes = 0;
3159 aflag = s->code32;
3160 dflag = s->code32;
3161 s->override = -1;
3162 rex_w = -1;
3163 rex_r = 0;
3164 #ifdef TARGET_X86_64
3165 s->rex_x = 0;
3166 s->rex_b = 0;
3167 x86_64_hregs = 0;
3168 #endif
3169 s->rip_offset = 0; /* for relative ip address */
3170 next_byte:
3171 b = ldub_code(s->pc);
3172 s->pc++;
3173 /* check prefixes */
3174 #ifdef TARGET_X86_64
3175 if (CODE64(s)) {
3176 switch (b) {
3177 case 0xf3:
3178 prefixes |= PREFIX_REPZ;
3179 goto next_byte;
3180 case 0xf2:
3181 prefixes |= PREFIX_REPNZ;
3182 goto next_byte;
3183 case 0xf0:
3184 prefixes |= PREFIX_LOCK;
3185 goto next_byte;
3186 case 0x2e:
3187 s->override = R_CS;
3188 goto next_byte;
3189 case 0x36:
3190 s->override = R_SS;
3191 goto next_byte;
3192 case 0x3e:
3193 s->override = R_DS;
3194 goto next_byte;
3195 case 0x26:
3196 s->override = R_ES;
3197 goto next_byte;
3198 case 0x64:
3199 s->override = R_FS;
3200 goto next_byte;
3201 case 0x65:
3202 s->override = R_GS;
3203 goto next_byte;
3204 case 0x66:
3205 prefixes |= PREFIX_DATA;
3206 goto next_byte;
3207 case 0x67:
3208 prefixes |= PREFIX_ADR;
3209 goto next_byte;
3210 case 0x40 ... 0x4f:
3211 /* REX prefix */
3212 rex_w = (b >> 3) & 1;
3213 rex_r = (b & 0x4) << 1;
3214 s->rex_x = (b & 0x2) << 2;
3215 REX_B(s) = (b & 0x1) << 3;
3216 x86_64_hregs = 1; /* select uniform byte register addressing */
3217 goto next_byte;
3218 }
3219 if (rex_w == 1) {
3220 /* 0x66 is ignored if rex.w is set */
3221 dflag = 2;
3222 } else {
3223 if (prefixes & PREFIX_DATA)
3224 dflag ^= 1;
3225 }
3226 if (!(prefixes & PREFIX_ADR))
3227 aflag = 2;
3228 } else
3229 #endif
3230 {
3231 switch (b) {
3232 case 0xf3:
3233 prefixes |= PREFIX_REPZ;
3234 goto next_byte;
3235 case 0xf2:
3236 prefixes |= PREFIX_REPNZ;
3237 goto next_byte;
3238 case 0xf0:
3239 prefixes |= PREFIX_LOCK;
3240 goto next_byte;
3241 case 0x2e:
3242 s->override = R_CS;
3243 goto next_byte;
3244 case 0x36:
3245 s->override = R_SS;
3246 goto next_byte;
3247 case 0x3e:
3248 s->override = R_DS;
3249 goto next_byte;
3250 case 0x26:
3251 s->override = R_ES;
3252 goto next_byte;
3253 case 0x64:
3254 s->override = R_FS;
3255 goto next_byte;
3256 case 0x65:
3257 s->override = R_GS;
3258 goto next_byte;
3259 case 0x66:
3260 prefixes |= PREFIX_DATA;
3261 goto next_byte;
3262 case 0x67:
3263 prefixes |= PREFIX_ADR;
3264 goto next_byte;
3265 }
3266 if (prefixes & PREFIX_DATA)
3267 dflag ^= 1;
3268 if (prefixes & PREFIX_ADR)
3269 aflag ^= 1;
3270 }
3271
3272 s->prefix = prefixes;
3273 s->aflag = aflag;
3274 s->dflag = dflag;
3275
3276 /* lock generation */
3277 if (prefixes & PREFIX_LOCK)
3278 gen_op_lock();
3279
3280 /* now check op code */
3281 reswitch:
3282 switch(b) {
3283 case 0x0f:
3284 /**************************/
3285 /* extended op code */
3286 b = ldub_code(s->pc++) | 0x100;
3287 goto reswitch;
3288
3289 /**************************/
3290 /* arith & logic */
3291 case 0x00 ... 0x05:
3292 case 0x08 ... 0x0d:
3293 case 0x10 ... 0x15:
3294 case 0x18 ... 0x1d:
3295 case 0x20 ... 0x25:
3296 case 0x28 ... 0x2d:
3297 case 0x30 ... 0x35:
3298 case 0x38 ... 0x3d:
3299 {
3300 int op, f, val;
3301 op = (b >> 3) & 7;
3302 f = (b >> 1) & 3;
3303
3304 if ((b & 1) == 0)
3305 ot = OT_BYTE;
3306 else
3307 ot = dflag + OT_WORD;
3308
3309 switch(f) {
3310 case 0: /* OP Ev, Gv */
3311 modrm = ldub_code(s->pc++);
3312 reg = ((modrm >> 3) & 7) | rex_r;
3313 mod = (modrm >> 6) & 3;
3314 rm = (modrm & 7) | REX_B(s);
3315 if (mod != 3) {
3316 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3317 opreg = OR_TMP0;
3318 } else if (op == OP_XORL && rm == reg) {
3319 xor_zero:
3320 /* xor reg, reg optimisation */
3321 gen_op_movl_T0_0();
3322 s->cc_op = CC_OP_LOGICB + ot;
3323 gen_op_mov_reg_T0[ot][reg]();
3324 gen_op_update1_cc();
3325 break;
3326 } else {
3327 opreg = rm;
3328 }
3329 gen_op_mov_TN_reg[ot][1][reg]();
3330 gen_op(s, op, ot, opreg);
3331 break;
3332 case 1: /* OP Gv, Ev */
3333 modrm = ldub_code(s->pc++);
3334 mod = (modrm >> 6) & 3;
3335 reg = ((modrm >> 3) & 7) | rex_r;
3336 rm = (modrm & 7) | REX_B(s);
3337 if (mod != 3) {
3338 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3339 gen_op_ld_T1_A0[ot + s->mem_index]();
3340 } else if (op == OP_XORL && rm == reg) {
3341 goto xor_zero;
3342 } else {
3343 gen_op_mov_TN_reg[ot][1][rm]();
3344 }
3345 gen_op(s, op, ot, reg);
3346 break;
3347 case 2: /* OP A, Iv */
3348 val = insn_get(s, ot);
3349 gen_op_movl_T1_im(val);
3350 gen_op(s, op, ot, OR_EAX);
3351 break;
3352 }
3353 }
3354 break;
3355
3356 case 0x80: /* GRP1 */
3357 case 0x81:
3358 case 0x82:
3359 case 0x83:
3360 {
3361 int val;
3362
3363 if ((b & 1) == 0)
3364 ot = OT_BYTE;
3365 else
3366 ot = dflag + OT_WORD;
3367
3368 modrm = ldub_code(s->pc++);
3369 mod = (modrm >> 6) & 3;
3370 rm = (modrm & 7) | REX_B(s);
3371 op = (modrm >> 3) & 7;
3372
3373 if (mod != 3) {
3374 if (b == 0x83)
3375 s->rip_offset = 1;
3376 else
3377 s->rip_offset = insn_const_size(ot);
3378 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3379 opreg = OR_TMP0;
3380 } else {
3381 opreg = rm;
3382 }
3383
3384 switch(b) {
3385 default:
3386 case 0x80:
3387 case 0x81:
3388 case 0x82:
3389 val = insn_get(s, ot);
3390 break;
3391 case 0x83:
3392 val = (int8_t)insn_get(s, OT_BYTE);
3393 break;
3394 }
3395 gen_op_movl_T1_im(val);
3396 gen_op(s, op, ot, opreg);
3397 }
3398 break;
3399
3400 /**************************/
3401 /* inc, dec, and other misc arith */
3402 case 0x40 ... 0x47: /* inc Gv */
3403 ot = dflag ? OT_LONG : OT_WORD;
3404 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3405 break;
3406 case 0x48 ... 0x4f: /* dec Gv */
3407 ot = dflag ? OT_LONG : OT_WORD;
3408 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3409 break;
3410 case 0xf6: /* GRP3 */
3411 case 0xf7:
3412 if ((b & 1) == 0)
3413 ot = OT_BYTE;
3414 else
3415 ot = dflag + OT_WORD;
3416
3417 modrm = ldub_code(s->pc++);
3418 mod = (modrm >> 6) & 3;
3419 rm = (modrm & 7) | REX_B(s);
3420 op = (modrm >> 3) & 7;
3421 if (mod != 3) {
3422 if (op == 0)
3423 s->rip_offset = insn_const_size(ot);
3424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3425 gen_op_ld_T0_A0[ot + s->mem_index]();
3426 } else {
3427 gen_op_mov_TN_reg[ot][0][rm]();
3428 }
3429
3430 switch(op) {
3431 case 0: /* test */
3432 val = insn_get(s, ot);
3433 gen_op_movl_T1_im(val);
3434 gen_op_testl_T0_T1_cc();
3435 s->cc_op = CC_OP_LOGICB + ot;
3436 break;
3437 case 2: /* not */
3438 gen_op_notl_T0();
3439 if (mod != 3) {
3440 gen_op_st_T0_A0[ot + s->mem_index]();
3441 } else {
3442 gen_op_mov_reg_T0[ot][rm]();
3443 }
3444 break;
3445 case 3: /* neg */
3446 gen_op_negl_T0();
3447 if (mod != 3) {
3448 gen_op_st_T0_A0[ot + s->mem_index]();
3449 } else {
3450 gen_op_mov_reg_T0[ot][rm]();
3451 }
3452 gen_op_update_neg_cc();
3453 s->cc_op = CC_OP_SUBB + ot;
3454 break;
3455 case 4: /* mul */
3456 switch(ot) {
3457 case OT_BYTE:
3458 gen_op_mulb_AL_T0();
3459 s->cc_op = CC_OP_MULB;
3460 break;
3461 case OT_WORD:
3462 gen_op_mulw_AX_T0();
3463 s->cc_op = CC_OP_MULW;
3464 break;
3465 default:
3466 case OT_LONG:
3467 gen_op_mull_EAX_T0();
3468 s->cc_op = CC_OP_MULL;
3469 break;
3470 #ifdef TARGET_X86_64
3471 case OT_QUAD:
3472 gen_op_mulq_EAX_T0();
3473 s->cc_op = CC_OP_MULQ;
3474 break;
3475 #endif
3476 }
3477 break;
3478 case 5: /* imul */
3479 switch(ot) {
3480 case OT_BYTE:
3481 gen_op_imulb_AL_T0();
3482 s->cc_op = CC_OP_MULB;
3483 break;
3484 case OT_WORD:
3485 gen_op_imulw_AX_T0();
3486 s->cc_op = CC_OP_MULW;
3487 break;
3488 default:
3489 case OT_LONG:
3490 gen_op_imull_EAX_T0();
3491 s->cc_op = CC_OP_MULL;
3492 break;
3493 #ifdef TARGET_X86_64
3494 case OT_QUAD:
3495 gen_op_imulq_EAX_T0();
3496 s->cc_op = CC_OP_MULQ;
3497 break;
3498 #endif
3499 }
3500 break;
3501 case 6: /* div */
3502 switch(ot) {
3503 case OT_BYTE:
3504 gen_jmp_im(pc_start - s->cs_base);
3505 gen_op_divb_AL_T0();
3506 break;
3507 case OT_WORD:
3508 gen_jmp_im(pc_start - s->cs_base);
3509 gen_op_divw_AX_T0();
3510 break;
3511 default:
3512 case OT_LONG:
3513 gen_jmp_im(pc_start - s->cs_base);
3514 gen_op_divl_EAX_T0();
3515 break;
3516 #ifdef TARGET_X86_64
3517 case OT_QUAD:
3518 gen_jmp_im(pc_start - s->cs_base);
3519 gen_op_divq_EAX_T0();
3520 break;
3521 #endif
3522 }
3523 break;
3524 case 7: /* idiv */
3525 switch(ot) {
3526 case OT_BYTE:
3527 gen_jmp_im(pc_start - s->cs_base);
3528 gen_op_idivb_AL_T0();
3529 break;
3530 case OT_WORD:
3531 gen_jmp_im(pc_start - s->cs_base);
3532 gen_op_idivw_AX_T0();
3533 break;
3534 default:
3535 case OT_LONG:
3536 gen_jmp_im(pc_start - s->cs_base);
3537 gen_op_idivl_EAX_T0();
3538 break;
3539 #ifdef TARGET_X86_64
3540 case OT_QUAD:
3541 gen_jmp_im(pc_start - s->cs_base);
3542 gen_op_idivq_EAX_T0();
3543 break;
3544 #endif
3545 }
3546 break;
3547 default:
3548 goto illegal_op;
3549 }
3550 break;
3551
3552 case 0xfe: /* GRP4 */
3553 case 0xff: /* GRP5 */
3554 if ((b & 1) == 0)
3555 ot = OT_BYTE;
3556 else
3557 ot = dflag + OT_WORD;
3558
3559 modrm = ldub_code(s->pc++);
3560 mod = (modrm >> 6) & 3;
3561 rm = (modrm & 7) | REX_B(s);
3562 op = (modrm >> 3) & 7;
3563 if (op >= 2 && b == 0xfe) {
3564 goto illegal_op;
3565 }
3566 if (CODE64(s)) {
3567 if (op == 2 || op == 4) {
3568 /* operand size for jumps is 64 bit */
3569 ot = OT_QUAD;
3570 } else if (op == 3 || op == 5) {
3571 /* for call calls, the operand is 16 or 32 bit, even
3572 in long mode */
3573 ot = dflag ? OT_LONG : OT_WORD;
3574 } else if (op == 6) {
3575 /* default push size is 64 bit */
3576 ot = dflag ? OT_QUAD : OT_WORD;
3577 }
3578 }
3579 if (mod != 3) {
3580 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3581 if (op >= 2 && op != 3 && op != 5)
3582 gen_op_ld_T0_A0[ot + s->mem_index]();
3583 } else {
3584 gen_op_mov_TN_reg[ot][0][rm]();
3585 }
3586
3587 switch(op) {
3588 case 0: /* inc Ev */
3589 if (mod != 3)
3590 opreg = OR_TMP0;
3591 else
3592 opreg = rm;
3593 gen_inc(s, ot, opreg, 1);
3594 break;
3595 case 1: /* dec Ev */
3596 if (mod != 3)
3597 opreg = OR_TMP0;
3598 else
3599 opreg = rm;
3600 gen_inc(s, ot, opreg, -1);
3601 break;
3602 case 2: /* call Ev */
3603 /* XXX: optimize if memory (no 'and' is necessary) */
3604 if (s->dflag == 0)
3605 gen_op_andl_T0_ffff();
3606 next_eip = s->pc - s->cs_base;
3607 gen_movtl_T1_im(next_eip);
3608 gen_push_T1(s);
3609 gen_op_jmp_T0();
3610 gen_eob(s);
3611 break;
3612 case 3: /* lcall Ev */
3613 gen_op_ld_T1_A0[ot + s->mem_index]();
3614 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3615 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3616 do_lcall:
3617 if (s->pe && !s->vm86) {
3618 if (s->cc_op != CC_OP_DYNAMIC)
3619 gen_op_set_cc_op(s->cc_op);
3620 gen_jmp_im(pc_start - s->cs_base);
3621 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3622 } else {
3623 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3624 }
3625 gen_eob(s);
3626 break;
3627 case 4: /* jmp Ev */
3628 if (s->dflag == 0)
3629 gen_op_andl_T0_ffff();
3630 gen_op_jmp_T0();
3631 gen_eob(s);
3632 break;
3633 case 5: /* ljmp Ev */
3634 gen_op_ld_T1_A0[ot + s->mem_index]();
3635 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3636 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3637 do_ljmp:
3638 if (s->pe && !s->vm86) {
3639 if (s->cc_op != CC_OP_DYNAMIC)
3640 gen_op_set_cc_op(s->cc_op);
3641 gen_jmp_im(pc_start - s->cs_base);
3642 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3643 } else {
3644 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3645 gen_op_movl_T0_T1();
3646 gen_op_jmp_T0();
3647 }
3648 gen_eob(s);
3649 break;
3650 case 6: /* push Ev */
3651 gen_push_T0(s);
3652 break;
3653 default:
3654 goto illegal_op;
3655 }
3656 break;
3657
3658 case 0x84: /* test Ev, Gv */
3659 case 0x85:
3660 if ((b & 1) == 0)
3661 ot = OT_BYTE;
3662 else
3663 ot = dflag + OT_WORD;
3664
3665 modrm = ldub_code(s->pc++);
3666 mod = (modrm >> 6) & 3;
3667 rm = (modrm & 7) | REX_B(s);
3668 reg = ((modrm >> 3) & 7) | rex_r;
3669
3670 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3671 gen_op_mov_TN_reg[ot][1][reg]();
3672 gen_op_testl_T0_T1_cc();
3673 s->cc_op = CC_OP_LOGICB + ot;
3674 break;
3675
3676 case 0xa8: /* test eAX, Iv */
3677 case 0xa9:
3678 if ((b & 1) == 0)
3679 ot = OT_BYTE;
3680 else
3681 ot = dflag + OT_WORD;
3682 val = insn_get(s, ot);
3683
3684 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3685 gen_op_movl_T1_im(val);
3686 gen_op_testl_T0_T1_cc();
3687 s->cc_op = CC_OP_LOGICB + ot;
3688 break;
3689
3690 case 0x98: /* CWDE/CBW */
3691 #ifdef TARGET_X86_64
3692 if (dflag == 2) {
3693 gen_op_movslq_RAX_EAX();
3694 } else
3695 #endif
3696 if (dflag == 1)
3697 gen_op_movswl_EAX_AX();
3698 else
3699 gen_op_movsbw_AX_AL();
3700 break;
3701 case 0x99: /* CDQ/CWD */
3702 #ifdef TARGET_X86_64
3703 if (dflag == 2) {
3704 gen_op_movsqo_RDX_RAX();
3705 } else
3706 #endif
3707 if (dflag == 1)
3708 gen_op_movslq_EDX_EAX();
3709 else
3710 gen_op_movswl_DX_AX();
3711 break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        /* two/three operand IMUL; 0x69 takes a full-size immediate,
           0x6b a sign-extended byte, 0x1af uses Gv as the second source */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* rip_offset: size of the immediate that follows the modrm bytes,
           needed so RIP-relative addressing points past it */
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            /* byte immediate is sign-extended to the operand size */
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg[ot][1][reg]();
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0[ot][reg]();
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        /* exchange-and-add: Ev <- Ev + Gv, Gv <- old Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1[ot][reg]();   /* Gv <- old Ev */
            gen_op_mov_reg_T0[ot][rm]();    /* Ev <- sum */
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0[ot + s->mem_index]();   /* memory <- sum */
            gen_op_mov_reg_T1[ot][reg]();           /* Gv <- old memory value */
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][1][reg]();   /* T1 = replacement value (Gv) */
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][rm]();
            /* the op compares eAX with T0 and presumably leaves the value
               to write back in T0 (op definition not visible here) */
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        /* flags are those of the implicit CMP eAX, Ev */
        s->cc_op = CC_OP_SUBB + ot;
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* CMPXCHG8B only takes a memory operand; register form is illegal */
        if (mod == 3)
            goto illegal_op;
        /* the helper may fault: point EIP at this instruction and flush
           the lazily-evaluated condition codes first */
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;
        break;
3807
3808 /**************************/
3809 /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        /* in 64 bit mode the default pop size is 64 bit (any non-zero
           dflag); a 66h prefix selects 16 bit */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
        break;
3825 case 0x60: /* pusha */
3826 if (CODE64(s))
3827 goto illegal_op;
3828 gen_pusha(s);
3829 break;
3830 case 0x61: /* popa */
3831 if (CODE64(s))
3832 goto illegal_op;
3833 gen_popa(s);
3834 break;
3835 case 0x68: /* push Iv */
3836 case 0x6a:
3837 if (CODE64(s)) {
3838 ot = dflag ? OT_QUAD : OT_WORD;
3839 } else {
3840 ot = dflag + OT_WORD;
3841 }
3842 if (b == 0x68)
3843 val = insn_get(s, ot);
3844 else
3845 val = (int8_t)insn_get(s, OT_BYTE);
3846 gen_op_movl_T0_im(val);
3847 gen_push_T0(s);
3848 break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* NOTE: order is important too for MMU exceptions */
            /* popl_esp_hack (operand size in bytes) makes ESP-relative
               effective addresses compensate for the word already popped;
               exact consumption is in the lea/modrm code, not visible here */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
3871 case 0xc8: /* enter */
3872 {
3873 int level;
3874 val = lduw_code(s->pc);
3875 s->pc += 2;
3876 level = ldub_code(s->pc++);
3877 gen_enter(s, val, level);
3878 }
3879 break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        /* LEAVE = mov SP, BP ; pop BP.  The SP copy uses the stack address
           size (64 bit / ss32 / 16 bit), the pop uses the operand size. */
        if (CODE64(s)) {
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
        } else if (s->ss32) {
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
        } else {
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0[ot][R_EBP]();
        gen_pop_update(s);
        break;
3901 case 0x06: /* push es */
3902 case 0x0e: /* push cs */
3903 case 0x16: /* push ss */
3904 case 0x1e: /* push ds */
3905 if (CODE64(s))
3906 goto illegal_op;
3907 gen_op_movl_T0_seg(b >> 3);
3908 gen_push_T0(s);
3909 break;
3910 case 0x1a0: /* push fs */
3911 case 0x1a8: /* push gs */
3912 gen_op_movl_T0_seg((b >> 3) & 7);
3913 gen_push_T0(s);
3914 break;
3915 case 0x07: /* pop es */
3916 case 0x17: /* pop ss */
3917 case 0x1f: /* pop ds */
3918 if (CODE64(s))
3919 goto illegal_op;
3920 reg = b >> 3;
3921 gen_pop_T0(s);
3922 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3923 gen_pop_update(s);
3924 if (reg == R_SS) {
3925 /* if reg == SS, inhibit interrupts/trace. */
3926 /* If several instructions disable interrupts, only the
3927 _first_ does it */
3928 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3929 gen_op_set_inhibit_irq();
3930 s->tf = 0;
3931 }
3932 if (s->is_jmp) {
3933 gen_jmp_im(s->pc - s->cs_base);
3934 gen_eob(s);
3935 }
3936 break;
3937 case 0x1a1: /* pop fs */
3938 case 0x1a9: /* pop gs */
3939 gen_pop_T0(s);
3940 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3941 gen_pop_update(s);
3942 if (s->is_jmp) {
3943 gen_jmp_im(s->pc - s->cs_base);
3944 gen_eob(s);
3945 }
3946 break;
3947
3948 /**************************/
3949 /* mov */
3950 case 0x88:
3951 case 0x89: /* mov Gv, Ev */
3952 if ((b & 1) == 0)
3953 ot = OT_BYTE;
3954 else
3955 ot = dflag + OT_WORD;
3956 modrm = ldub_code(s->pc++);
3957 reg = ((modrm >> 3) & 7) | rex_r;
3958
3959 /* generate a generic store */
3960 gen_ldst_modrm(s, modrm, ot, reg, 1);
3961 break;
3962 case 0xc6:
3963 case 0xc7: /* mov Ev, Iv */
3964 if ((b & 1) == 0)
3965 ot = OT_BYTE;
3966 else
3967 ot = dflag + OT_WORD;
3968 modrm = ldub_code(s->pc++);
3969 mod = (modrm >> 6) & 3;
3970 if (mod != 3) {
3971 s->rip_offset = insn_const_size(ot);
3972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3973 }
3974 val = insn_get(s, ot);
3975 gen_op_movl_T0_im(val);
3976 if (mod != 3)
3977 gen_op_st_T0_A0[ot + s->mem_index]();
3978 else
3979 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3980 break;
3981 case 0x8a:
3982 case 0x8b: /* mov Ev, Gv */
3983 if ((b & 1) == 0)
3984 ot = OT_BYTE;
3985 else
3986 ot = OT_WORD + dflag;
3987 modrm = ldub_code(s->pc++);
3988 reg = ((modrm >> 3) & 7) | rex_r;
3989
3990 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3991 gen_op_mov_reg_T0[ot][reg]();
3992 break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        /* only the six segment registers are valid, and CS cannot be
           loaded with MOV */
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        /* gen_movl_seg_T0 may have ended the TB (is_jmp set, presumably
           for protected-mode loads; its definition is not visible here) */
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4013 case 0x8c: /* mov Gv, seg */
4014 modrm = ldub_code(s->pc++);
4015 reg = (modrm >> 3) & 7;
4016 mod = (modrm >> 6) & 3;
4017 if (reg >= 6)
4018 goto illegal_op;
4019 gen_op_movl_T0_seg(reg);
4020 if (mod == 3)
4021 ot = OT_WORD + dflag;
4022 else
4023 ot = OT_WORD;
4024 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4025 break;
4026
4027 case 0x1b6: /* movzbS Gv, Eb */
4028 case 0x1b7: /* movzwS Gv, Eb */
4029 case 0x1be: /* movsbS Gv, Eb */
4030 case 0x1bf: /* movswS Gv, Eb */
4031 {
4032 int d_ot;
4033 /* d_ot is the size of destination */
4034 d_ot = dflag + OT_WORD;
4035 /* ot is the size of source */
4036 ot = (b & 1) + OT_BYTE;
4037 modrm = ldub_code(s->pc++);
4038 reg = ((modrm >> 3) & 7) | rex_r;
4039 mod = (modrm >> 6) & 3;
4040 rm = (modrm & 7) | REX_B(s);
4041
4042 if (mod == 3) {
4043 gen_op_mov_TN_reg[ot][0][rm]();
4044 switch(ot | (b & 8)) {
4045 case OT_BYTE:
4046 gen_op_movzbl_T0_T0();
4047 break;
4048 case OT_BYTE | 8:
4049 gen_op_movsbl_T0_T0();
4050 break;
4051 case OT_WORD:
4052 gen_op_movzwl_T0_T0();
4053 break;
4054 default:
4055 case OT_WORD | 8:
4056 gen_op_movswl_T0_T0();
4057 break;
4058 }
4059 gen_op_mov_reg_T0[d_ot][reg]();
4060 } else {
4061 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4062 if (b & 8) {
4063 gen_op_lds_T0_A0[ot + s->mem_index]();
4064 } else {
4065 gen_op_ldu_T0_A0[ot + s->mem_index]();
4066 }
4067 gen_op_mov_reg_T0[d_ot][reg]();
4068 }
4069 }
4070 break;
4071
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* LEA requires a memory-form modrm; register operand is illegal */
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added: temporarily cancel any
           segment override and the addseg base addition while computing
           the effective address, then restore addseg */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
        break;
4087
4088 case 0xa0: /* mov EAX, Ov */
4089 case 0xa1:
4090 case 0xa2: /* mov Ov, EAX */
4091 case 0xa3:
4092 {
4093 target_ulong offset_addr;
4094
4095 if ((b & 1) == 0)
4096 ot = OT_BYTE;
4097 else
4098 ot = dflag + OT_WORD;
4099 #ifdef TARGET_X86_64
4100 if (s->aflag == 2) {
4101 offset_addr = ldq_code(s->pc);
4102 s->pc += 8;
4103 if (offset_addr == (int32_t)offset_addr)
4104 gen_op_movq_A0_im(offset_addr);
4105 else
4106 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4107 } else
4108 #endif
4109 {
4110 if (s->aflag) {
4111 offset_addr = insn_get(s, OT_LONG);
4112 } else {
4113 offset_addr = insn_get(s, OT_WORD);
4114 }
4115 gen_op_movl_A0_im(offset_addr);
4116 }
4117 gen_add_A0_ds_seg(s);
4118 if ((b & 2) == 0) {
4119 gen_op_ld_T0_A0[ot + s->mem_index]();
4120 gen_op_mov_reg_T0[ot][R_EAX]();
4121 } else {
4122 gen_op_mov_TN_reg[ot][0][R_EAX]();
4123 gen_op_st_T0_A0[ot + s->mem_index]();
4124 }
4125 }
4126 break;
4127 case 0xd7: /* xlat */
4128 #ifdef TARGET_X86_64
4129 if (s->aflag == 2) {
4130 gen_op_movq_A0_reg[R_EBX]();
4131 gen_op_addq_A0_AL();
4132 } else
4133 #endif
4134 {
4135 gen_op_movl_A0_reg[R_EBX]();
4136 gen_op_addl_A0_AL();
4137 if (s->aflag == 0)
4138 gen_op_andl_A0_ffff();
4139 }
4140 gen_add_A0_ds_seg(s);
4141 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4142 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4143 break;
4144 case 0xb0 ... 0xb7: /* mov R, Ib */
4145 val = insn_get(s, OT_BYTE);
4146 gen_op_movl_T0_im(val);
4147 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4148 break;
4149 case 0xb8 ... 0xbf: /* mov R, Iv */
4150 #ifdef TARGET_X86_64
4151 if (dflag == 2) {
4152 uint64_t tmp;
4153 /* 64 bit case */
4154 tmp = ldq_code(s->pc);
4155 s->pc += 8;
4156 reg = (b & 7) | REX_B(s);
4157 gen_movtl_T0_im(tmp);
4158 gen_op_mov_reg_T0[OT_QUAD][reg]();
4159 } else
4160 #endif
4161 {
4162 ot = dflag ? OT_LONG : OT_WORD;
4163 val = insn_get(s, ot);
4164 reg = (b & 7) | REX_B(s);
4165 gen_op_movl_T0_im(val);
4166 gen_op_mov_reg_T0[ot][reg]();
4167 }
4168 break;
4169
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            /* plain register swap through T0/T1 */
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_mov_reg_T0[ot][rm]();
            gen_op_mov_reg_T1[ot][reg]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            /* for xchg, lock is implicit */
            /* NOTE(review): lock is emitted here only when no LOCK prefix
               was present — presumably the prefix handling earlier in the
               decoder already emitted it otherwise; verify against the
               prefix-decoding code (not visible here) */
            if (!(prefixes & PREFIX_LOCK))
                gen_op_lock();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_st_T0_A0[ot + s->mem_index]();
            if (!(prefixes & PREFIX_LOCK))
                gen_op_unlock();
            gen_op_mov_reg_T1[ot][reg]();
        }
        break;
4204 case 0xc4: /* les Gv */
4205 if (CODE64(s))
4206 goto illegal_op;
4207 op = R_ES;
4208 goto do_lxx;
4209 case 0xc5: /* lds Gv */
4210 if (CODE64(s))
4211 goto illegal_op;
4212 op = R_DS;
4213 goto do_lxx;
4214 case 0x1b2: /* lss Gv */
4215 op = R_SS;
4216 goto do_lxx;
4217 case 0x1b4: /* lfs Gv */
4218 op = R_FS;
4219 goto do_lxx;
4220 case 0x1b5: /* lgs Gv */
4221 op = R_GS;
4222 do_lxx:
4223 ot = dflag ? OT_LONG : OT_WORD;
4224 modrm = ldub_code(s->pc++);
4225 reg = ((modrm >> 3) & 7) | rex_r;
4226 mod = (modrm >> 6) & 3;
4227 if (mod == 3)
4228 goto illegal_op;
4229 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4230 gen_op_ld_T1_A0[ot + s->mem_index]();
4231 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4232 /* load the segment first to handle exceptions properly */
4233 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4234 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4235 /* then put the data */
4236 gen_op_mov_reg_T1[ot][reg]();
4237 if (s->is_jmp) {
4238 gen_jmp_im(s->pc - s->cs_base);
4239 gen_eob(s);
4240 }
4241 break;
4242
4243 /************************/
4244 /* shifts */
4245 case 0xc0:
4246 case 0xc1:
4247 /* shift Ev,Ib */
4248 shift = 2;
4249 grp2:
4250 {
4251 if ((b & 1) == 0)
4252 ot = OT_BYTE;
4253 else
4254 ot = dflag + OT_WORD;
4255
4256 modrm = ldub_code(s->pc++);
4257 mod = (modrm >> 6) & 3;
4258 op = (modrm >> 3) & 7;
4259
4260 if (mod != 3) {
4261 if (shift == 2) {
4262 s->rip_offset = 1;
4263 }
4264 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4265 opreg = OR_TMP0;
4266 } else {
4267 opreg = (modrm & 7) | REX_B(s);
4268 }
4269
4270 /* simpler op */
4271 if (shift == 0) {
4272 gen_shift(s, op, ot, opreg, OR_ECX);
4273 } else {
4274 if (shift == 2) {
4275 shift = ldub_code(s->pc++);
4276 }
4277 gen_shifti(s, op, ot, opreg, shift);
4278 }
4279 }
4280 break;
4281 case 0xd0:
4282 case 0xd1:
4283 /* shift Ev,1 */
4284 shift = 1;
4285 goto grp2;
4286 case 0xd2:
4287 case 0xd3:
4288 /* shift Ev,cl */
4289 shift = 0;
4290 goto grp2;
4291
4292 case 0x1a4: /* shld imm */
4293 op = 0;
4294 shift = 1;
4295 goto do_shiftd;
4296 case 0x1a5: /* shld cl */
4297 op = 0;
4298 shift = 0;
4299 goto do_shiftd;
4300 case 0x1ac: /* shrd imm */
4301 op = 1;
4302 shift = 1;
4303 goto do_shiftd;
4304 case 0x1ad: /* shrd cl */
4305 op = 1;
4306 shift = 0;
4307 do_shiftd:
4308 ot = dflag + OT_WORD;
4309 modrm = ldub_code(s->pc++);
4310 mod = (modrm >> 6) & 3;
4311 rm = (modrm & 7) | REX_B(s);
4312 reg = ((modrm >> 3) & 7) | rex_r;
4313
4314 if (mod != 3) {
4315 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4316 gen_op_ld_T0_A0[ot + s->mem_index]();
4317 } else {
4318 gen_op_mov_TN_reg[ot][0][rm]();
4319 }
4320 gen_op_mov_TN_reg[ot][1][reg]();
4321
4322 if (shift) {
4323 val = ldub_code(s->pc++);
4324 if (ot == OT_QUAD)
4325 val &= 0x3f;
4326 else
4327 val &= 0x1f;
4328 if (val) {
4329 if (mod == 3)
4330 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4331 else
4332 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4333 if (op == 0 && ot != OT_WORD)
4334 s->cc_op = CC_OP_SHLB + ot;
4335 else
4336 s->cc_op = CC_OP_SARB + ot;
4337 }
4338 } else {
4339 if (s->cc_op != CC_OP_DYNAMIC)
4340 gen_op_set_cc_op(s->cc_op);
4341 if (mod == 3)
4342 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4343 else
4344 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4345 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4346 }
4347 if (mod == 3) {
4348 gen_op_mov_reg_T0[ot][rm]();
4349 }
4350 break;
4351
4352 /************************/
4353 /* floats */
4354 case 0xd8 ... 0xdf:
4355 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4356 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4357 /* XXX: what to do if illegal op ? */
4358 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4359 break;
4360 }
4361 modrm = ldub_code(s->pc++);
4362 mod = (modrm >> 6) & 3;
4363 rm = modrm & 7;
4364 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4365 if (mod != 3) {
4366 /* memory op */
4367 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4368 switch(op) {
4369 case 0x00 ... 0x07: /* fxxxs */
4370 case 0x10 ... 0x17: /* fixxxl */
4371 case 0x20 ... 0x27: /* fxxxl */
4372 case 0x30 ... 0x37: /* fixxx */
4373 {
4374 int op1;
4375 op1 = op & 7;
4376
4377 switch(op >> 4) {
4378 case 0:
4379 gen_op_flds_FT0_A0();
4380 break;
4381 case 1:
4382 gen_op_fildl_FT0_A0();
4383 break;
4384 case 2:
4385 gen_op_fldl_FT0_A0();
4386 break;
4387 case 3:
4388 default:
4389 gen_op_fild_FT0_A0();
4390 break;
4391 }
4392
4393 gen_op_fp_arith_ST0_FT0[op1]();
4394 if (op1 == 3) {
4395 /* fcomp needs pop */
4396 gen_op_fpop();
4397 }
4398 }
4399 break;
4400 case 0x08: /* flds */
4401 case 0x0a: /* fsts */
4402 case 0x0b: /* fstps */
4403 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4404 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4405 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4406 switch(op & 7) {
4407 case 0:
4408 switch(op >> 4) {
4409 case 0:
4410 gen_op_flds_ST0_A0();
4411 break;
4412 case 1:
4413 gen_op_fildl_ST0_A0();
4414 break;
4415 case 2:
4416 gen_op_fldl_ST0_A0();
4417 break;
4418 case 3:
4419 default:
4420 gen_op_fild_ST0_A0();
4421 break;
4422 }
4423 break;
4424 case 1:
4425 switch(op >> 4) {
4426 case 1:
4427 gen_op_fisttl_ST0_A0();
4428 break;
4429 case 2:
4430 gen_op_fisttll_ST0_A0();
4431 break;
4432 case 3:
4433 default:
4434 gen_op_fistt_ST0_A0();
4435 }
4436 gen_op_fpop();
4437 break;
4438 default:
4439 switch(op >> 4) {
4440 case 0:
4441 gen_op_fsts_ST0_A0();
4442 break;
4443 case 1:
4444 gen_op_fistl_ST0_A0();
4445 break;
4446 case 2:
4447 gen_op_fstl_ST0_A0();
4448 break;
4449 case 3:
4450 default:
4451 gen_op_fist_ST0_A0();
4452 break;
4453 }
4454 if ((op & 7) == 3)
4455 gen_op_fpop();
4456 break;
4457 }
4458 break;
4459 case 0x0c: /* fldenv mem */
4460 gen_op_fldenv_A0(s->dflag);
4461 break;
4462 case 0x0d: /* fldcw mem */
4463 gen_op_fldcw_A0();
4464 break;
4465 case 0x0e: /* fnstenv mem */
4466 gen_op_fnstenv_A0(s->dflag);
4467 break;
4468 case 0x0f: /* fnstcw mem */
4469 gen_op_fnstcw_A0();
4470 break;
4471 case 0x1d: /* fldt mem */
4472 gen_op_fldt_ST0_A0();
4473 break;
4474 case 0x1f: /* fstpt mem */
4475 gen_op_fstt_ST0_A0();
4476 gen_op_fpop();
4477 break;
4478 case 0x2c: /* frstor mem */
4479 gen_op_frstor_A0(s->dflag);
4480 break;
4481 case 0x2e: /* fnsave mem */
4482 gen_op_fnsave_A0(s->dflag);
4483 break;
4484 case 0x2f: /* fnstsw mem */
4485 gen_op_fnstsw_A0();
4486 break;
4487 case 0x3c: /* fbld */
4488 gen_op_fbld_ST0_A0();
4489 break;
4490 case 0x3e: /* fbstp */
4491 gen_op_fbst_ST0_A0();
4492 gen_op_fpop();
4493 break;
4494 case 0x3d: /* fildll */
4495 gen_op_fildll_ST0_A0();
4496 break;
4497 case 0x3f: /* fistpll */
4498 gen_op_fistll_ST0_A0();
4499 gen_op_fpop();
4500 break;
4501 default:
4502 goto illegal_op;
4503 }
4504 } else {
4505 /* register float ops */
4506 opreg = rm;
4507
4508 switch(op) {
4509 case 0x08: /* fld sti */
4510 gen_op_fpush();
4511 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4512 break;
4513 case 0x09: /* fxchg sti */
4514 case 0x29: /* fxchg4 sti, undocumented op */
4515 case 0x39: /* fxchg7 sti, undocumented op */
4516 gen_op_fxchg_ST0_STN(opreg);
4517 break;
4518 case 0x0a: /* grp d9/2 */
4519 switch(rm) {
4520 case 0: /* fnop */
4521 /* check exceptions (FreeBSD FPU probe) */
4522 if (s->cc_op != CC_OP_DYNAMIC)
4523 gen_op_set_cc_op(s->cc_op);
4524 gen_jmp_im(pc_start - s->cs_base);
4525 gen_op_fwait();
4526 break;
4527 default:
4528 goto illegal_op;
4529 }
4530 break;
4531 case 0x0c: /* grp d9/4 */
4532 switch(rm) {
4533 case 0: /* fchs */
4534 gen_op_fchs_ST0();
4535 break;
4536 case 1: /* fabs */
4537 gen_op_fabs_ST0();
4538 break;
4539 case 4: /* ftst */
4540 gen_op_fldz_FT0();
4541 gen_op_fcom_ST0_FT0();
4542 break;
4543 case 5: /* fxam */
4544 gen_op_fxam_ST0();
4545 break;
4546 default:
4547 goto illegal_op;
4548 }
4549 break;
4550 case 0x0d: /* grp d9/5 */
4551 {
4552 switch(rm) {
4553 case 0:
4554 gen_op_fpush();
4555 gen_op_fld1_ST0();
4556 break;
4557 case 1:
4558 gen_op_fpush();
4559 gen_op_fldl2t_ST0();
4560 break;
4561 case 2:
4562 gen_op_fpush();
4563 gen_op_fldl2e_ST0();
4564 break;
4565 case 3:
4566 gen_op_fpush();
4567 gen_op_fldpi_ST0();
4568 break;
4569 case 4:
4570 gen_op_fpush();
4571 gen_op_fldlg2_ST0();
4572 break;
4573 case 5:
4574 gen_op_fpush();
4575 gen_op_fldln2_ST0();
4576 break;
4577 case 6:
4578 gen_op_fpush();
4579 gen_op_fldz_ST0();
4580 break;
4581 default:
4582 goto illegal_op;
4583 }
4584 }
4585 break;
4586 case 0x0e: /* grp d9/6 */
4587 switch(rm) {
4588 case 0: /* f2xm1 */
4589 gen_op_f2xm1();
4590 break;
4591 case 1: /* fyl2x */
4592 gen_op_fyl2x();
4593 break;
4594 case 2: /* fptan */
4595 gen_op_fptan();
4596 break;
4597 case 3: /* fpatan */
4598 gen_op_fpatan();
4599 break;
4600 case 4: /* fxtract */
4601 gen_op_fxtract();
4602 break;
4603 case 5: /* fprem1 */
4604 gen_op_fprem1();
4605 break;
4606 case 6: /* fdecstp */
4607 gen_op_fdecstp();
4608 break;
4609 default:
4610 case 7: /* fincstp */
4611 gen_op_fincstp();
4612 break;
4613 }
4614 break;
4615 case 0x0f: /* grp d9/7 */
4616 switch(rm) {
4617 case 0: /* fprem */
4618 gen_op_fprem();
4619 break;
4620 case 1: /* fyl2xp1 */
4621 gen_op_fyl2xp1();
4622 break;
4623 case 2: /* fsqrt */
4624 gen_op_fsqrt();
4625 break;
4626 case 3: /* fsincos */
4627 gen_op_fsincos();
4628 break;
4629 case 5: /* fscale */
4630 gen_op_fscale();
4631 break;
4632 case 4: /* frndint */
4633 gen_op_frndint();
4634 break;
4635 case 6: /* fsin */
4636 gen_op_fsin();
4637 break;
4638 default:
4639 case 7: /* fcos */
4640 gen_op_fcos();
4641 break;
4642 }
4643 break;
4644 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4645 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4646 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4647 {
4648 int op1;
4649
4650 op1 = op & 7;
4651 if (op >= 0x20) {
4652 gen_op_fp_arith_STN_ST0[op1](opreg);
4653 if (op >= 0x30)
4654 gen_op_fpop();
4655 } else {
4656 gen_op_fmov_FT0_STN(opreg);
4657 gen_op_fp_arith_ST0_FT0[op1]();
4658 }
4659 }
4660 break;
4661 case 0x02: /* fcom */
4662 case 0x22: /* fcom2, undocumented op */
4663 gen_op_fmov_FT0_STN(opreg);
4664 gen_op_fcom_ST0_FT0();
4665 break;
4666 case 0x03: /* fcomp */
4667 case 0x23: /* fcomp3, undocumented op */
4668 case 0x32: /* fcomp5, undocumented op */
4669 gen_op_fmov_FT0_STN(opreg);
4670 gen_op_fcom_ST0_FT0();
4671 gen_op_fpop();
4672 break;
4673 case 0x15: /* da/5 */
4674 switch(rm) {
4675 case 1: /* fucompp */
4676 gen_op_fmov_FT0_STN(1);
4677 gen_op_fucom_ST0_FT0();
4678 gen_op_fpop();
4679 gen_op_fpop();
4680 break;
4681 default:
4682 goto illegal_op;
4683 }
4684 break;
4685 case 0x1c:
4686 switch(rm) {
4687 case 0: /* feni (287 only, just do nop here) */
4688 break;
4689 case 1: /* fdisi (287 only, just do nop here) */
4690 break;
4691 case 2: /* fclex */
4692 gen_op_fclex();
4693 break;
4694 case 3: /* fninit */
4695 gen_op_fninit();
4696 break;
4697 case 4: /* fsetpm (287 only, just do nop here) */
4698 break;
4699 default:
4700 goto illegal_op;
4701 }
4702 break;
4703 case 0x1d: /* fucomi */
4704 if (s->cc_op != CC_OP_DYNAMIC)
4705 gen_op_set_cc_op(s->cc_op);
4706 gen_op_fmov_FT0_STN(opreg);
4707 gen_op_fucomi_ST0_FT0();
4708 s->cc_op = CC_OP_EFLAGS;
4709 break;
4710 case 0x1e: /* fcomi */
4711 if (s->cc_op != CC_OP_DYNAMIC)
4712 gen_op_set_cc_op(s->cc_op);
4713 gen_op_fmov_FT0_STN(opreg);
4714 gen_op_fcomi_ST0_FT0();
4715 s->cc_op = CC_OP_EFLAGS;
4716 break;
4717 case 0x28: /* ffree sti */
4718 gen_op_ffree_STN(opreg);
4719 break;
4720 case 0x2a: /* fst sti */
4721 gen_op_fmov_STN_ST0(opreg);
4722 break;
4723 case 0x2b: /* fstp sti */
4724 case 0x0b: /* fstp1 sti, undocumented op */
4725 case 0x3a: /* fstp8 sti, undocumented op */
4726 case 0x3b: /* fstp9 sti, undocumented op */
4727 gen_op_fmov_STN_ST0(opreg);
4728 gen_op_fpop();
4729 break;
4730 case 0x2c: /* fucom st(i) */
4731 gen_op_fmov_FT0_STN(opreg);
4732 gen_op_fucom_ST0_FT0();
4733 break;
4734 case 0x2d: /* fucomp st(i) */
4735 gen_op_fmov_FT0_STN(opreg);
4736 gen_op_fucom_ST0_FT0();
4737 gen_op_fpop();
4738 break;
4739 case 0x33: /* de/3 */
4740 switch(rm) {
4741 case 1: /* fcompp */
4742 gen_op_fmov_FT0_STN(1);
4743 gen_op_fcom_ST0_FT0();
4744 gen_op_fpop();
4745 gen_op_fpop();
4746 break;
4747 default:
4748 goto illegal_op;
4749 }
4750 break;
4751 case 0x38: /* ffreep sti, undocumented op */
4752 gen_op_ffree_STN(opreg);
4753 gen_op_fpop();
4754 break;
4755 case 0x3c: /* df/4 */
4756 switch(rm) {
4757 case 0:
4758 gen_op_fnstsw_EAX();
4759 break;
4760 default:
4761 goto illegal_op;
4762 }
4763 break;
4764 case 0x3d: /* fucomip */
4765 if (s->cc_op != CC_OP_DYNAMIC)
4766 gen_op_set_cc_op(s->cc_op);
4767 gen_op_fmov_FT0_STN(opreg);
4768 gen_op_fucomi_ST0_FT0();
4769 gen_op_fpop();
4770 s->cc_op = CC_OP_EFLAGS;
4771 break;
4772 case 0x3e: /* fcomip */
4773 if (s->cc_op != CC_OP_DYNAMIC)
4774 gen_op_set_cc_op(s->cc_op);
4775 gen_op_fmov_FT0_STN(opreg);
4776 gen_op_fcomi_ST0_FT0();
4777 gen_op_fpop();
4778 s->cc_op = CC_OP_EFLAGS;
4779 break;
4780 case 0x10 ... 0x13: /* fcmovxx */
4781 case 0x18 ... 0x1b:
4782 {
4783 int op1;
4784 const static uint8_t fcmov_cc[8] = {
4785 (JCC_B << 1),
4786 (JCC_Z << 1),
4787 (JCC_BE << 1),
4788 (JCC_P << 1),
4789 };
4790 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4791 gen_setcc(s, op1);
4792 gen_op_fcmov_ST0_STN_T0(opreg);
4793 }
4794 break;
4795 default:
4796 goto illegal_op;
4797 }
4798 }
4799 #ifdef USE_CODE_COPY
4800 s->tb->cflags |= CF_TB_FP_USED;
4801 #endif
4802 break;
4803 /************************/
4804 /* string ops */
4805
4806 case 0xa4: /* movsS */
4807 case 0xa5:
4808 if ((b & 1) == 0)
4809 ot = OT_BYTE;
4810 else
4811 ot = dflag + OT_WORD;
4812
4813 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4814 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4815 } else {
4816 gen_movs(s, ot);
4817 }
4818 break;
4819
4820 case 0xaa: /* stosS */
4821 case 0xab:
4822 if ((b & 1) == 0)
4823 ot = OT_BYTE;
4824 else
4825 ot = dflag + OT_WORD;
4826
4827 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4828 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4829 } else {
4830 gen_stos(s, ot);
4831 }
4832 break;
4833 case 0xac: /* lodsS */
4834 case 0xad:
4835 if ((b & 1) == 0)
4836 ot = OT_BYTE;
4837 else
4838 ot = dflag + OT_WORD;
4839 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4840 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4841 } else {
4842 gen_lods(s, ot);
4843 }
4844 break;
4845 case 0xae: /* scasS */
4846 case 0xaf:
4847 if ((b & 1) == 0)
4848 ot = OT_BYTE;
4849 else
4850 ot = dflag + OT_WORD;
4851 if (prefixes & PREFIX_REPNZ) {
4852 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4853 } else if (prefixes & PREFIX_REPZ) {
4854 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4855 } else {
4856 gen_scas(s, ot);
4857 s->cc_op = CC_OP_SUBB + ot;
4858 }
4859 break;
4860
4861 case 0xa6: /* cmpsS */
4862 case 0xa7:
4863 if ((b & 1) == 0)
4864 ot = OT_BYTE;
4865 else
4866 ot = dflag + OT_WORD;
4867 if (prefixes & PREFIX_REPNZ) {
4868 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4869 } else if (prefixes & PREFIX_REPZ) {
4870 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4871 } else {
4872 gen_cmps(s, ot);
4873 s->cc_op = CC_OP_SUBB + ot;
4874 }
4875 break;
4876 case 0x6c: /* insS */
4877 case 0x6d:
4878 if ((b & 1) == 0)
4879 ot = OT_BYTE;
4880 else
4881 ot = dflag ? OT_LONG : OT_WORD;
4882 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4883 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4884 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4885 } else {
4886 gen_ins(s, ot);
4887 }
4888 break;
4889 case 0x6e: /* outsS */
4890 case 0x6f:
4891 if ((b & 1) == 0)
4892 ot = OT_BYTE;
4893 else
4894 ot = dflag ? OT_LONG : OT_WORD;
4895 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4896 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4897 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4898 } else {
4899 gen_outs(s, ot);
4900 }
4901 break;
4902
4903 /************************/
4904 /* port I/O */
4905 case 0xe4:
4906 case 0xe5:
4907 if ((b & 1) == 0)
4908 ot = OT_BYTE;
4909 else
4910 ot = dflag ? OT_LONG : OT_WORD;
4911 val = ldub_code(s->pc++);
4912 gen_op_movl_T0_im(val);
4913 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4914 gen_op_in[ot]();
4915 gen_op_mov_reg_T1[ot][R_EAX]();
4916 break;
4917 case 0xe6:
4918 case 0xe7:
4919 if ((b & 1) == 0)
4920 ot = OT_BYTE;
4921 else
4922 ot = dflag ? OT_LONG : OT_WORD;
4923 val = ldub_code(s->pc++);
4924 gen_op_movl_T0_im(val);
4925 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4926 gen_op_mov_TN_reg[ot][1][R_EAX]();
4927 gen_op_out[ot]();
4928 break;
4929 case 0xec:
4930 case 0xed:
4931 if ((b & 1) == 0)
4932 ot = OT_BYTE;
4933 else
4934 ot = dflag ? OT_LONG : OT_WORD;
4935 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4936 gen_op_andl_T0_ffff();
4937 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4938 gen_op_in[ot]();
4939 gen_op_mov_reg_T1[ot][R_EAX]();
4940 break;
4941 case 0xee:
4942 case 0xef:
4943 if ((b & 1) == 0)
4944 ot = OT_BYTE;
4945 else
4946 ot = dflag ? OT_LONG : OT_WORD;
4947 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4948 gen_op_andl_T0_ffff();
4949 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4950 gen_op_mov_TN_reg[ot][1][R_EAX]();
4951 gen_op_out[ot]();
4952 break;
4953
4954 /************************/
4955 /* control */
    case 0xc2: /* ret im */
        /* imm16: extra bytes to release from the stack after the pop */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        /* in 64 bit mode a near ret pops 64 bits unless a 66h prefix
           forces 16 bit */
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        /* release the return address (2 << dflag bytes) plus the immediate */
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        /* with a 16 bit operand size only IP is loaded */
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
4976 case 0xca: /* lret im */
4977 val = ldsw_code(s->pc);
4978 s->pc += 2;
4979 do_lret:
4980 if (s->pe && !s->vm86) {
4981 if (s->cc_op != CC_OP_DYNAMIC)
4982 gen_op_set_cc_op(s->cc_op);
4983 gen_jmp_im(pc_start - s->cs_base);
4984 gen_op_lret_protected(s->dflag, val);
4985 } else {
4986 gen_stack_A0(s);
4987 /* pop offset */
4988 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4989 if (s->dflag == 0)
4990 gen_op_andl_T0_ffff();
4991 /* NOTE: keeping EIP updated is not a problem in case of
4992 exception */
4993 gen_op_jmp_T0();
4994 /* pop selector */
4995 gen_op_addl_A0_im(2 << s->dflag);
4996 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4997 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4998 /* add stack offset */
4999 gen_stack_update(s, val + (4 << s->dflag));
5000 }
5001 gen_eob(s);
5002 break;
5003 case 0xcb: /* lret */
5004 val = 0;
5005 goto do_lret;
5006 case 0xcf: /* iret */
5007 if (!s->pe) {
5008 /* real mode */
5009 gen_op_iret_real(s->dflag);
5010 s->cc_op = CC_OP_EFLAGS;
5011 } else if (s->vm86) {
5012 if (s->iopl != 3) {
5013 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5014 } else {
5015 gen_op_iret_real(s->dflag);
5016 s->cc_op = CC_OP_EFLAGS;
5017 }
5018 } else {
5019 if (s->cc_op != CC_OP_DYNAMIC)
5020 gen_op_set_cc_op(s->cc_op);
5021 gen_jmp_im(pc_start - s->cs_base);
5022 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5023 s->cc_op = CC_OP_EFLAGS;
5024 }
5025 gen_eob(s);
5026 break;
5027 case 0xe8: /* call im */
5028 {
5029 if (dflag)
5030 tval = (int32_t)insn_get(s, OT_LONG);
5031 else
5032 tval = (int16_t)insn_get(s, OT_WORD);
5033 next_eip = s->pc - s->cs_base;
5034 tval += next_eip;
5035 if (s->dflag == 0)
5036 tval &= 0xffff;
5037 gen_movtl_T0_im(next_eip);
5038 gen_push_T0(s);
5039 gen_jmp(s, tval);
5040 }
5041 break;
5042 case 0x9a: /* lcall im */
5043 {
5044 unsigned int selector, offset;
5045
5046 if (CODE64(s))
5047 goto illegal_op;
5048 ot = dflag ? OT_LONG : OT_WORD;
5049 offset = insn_get(s, ot);
5050 selector = insn_get(s, OT_WORD);
5051
5052 gen_op_movl_T0_im(selector);
5053 gen_op_movl_T1_imu(offset);
5054 }
5055 goto do_lcall;
5056 case 0xe9: /* jmp im */
5057 if (dflag)
5058 tval = (int32_t)insn_get(s, OT_LONG);
5059 else
5060 tval = (int16_t)insn_get(s, OT_WORD);
5061 tval += s->pc - s->cs_base;
5062 if (s->dflag == 0)
5063 tval &= 0xffff;
5064 gen_jmp(s, tval);
5065 break;
5066 case 0xea: /* ljmp im */
5067 {
5068 unsigned int selector, offset;
5069
5070 if (CODE64(s))
5071 goto illegal_op;
5072 ot = dflag ? OT_LONG : OT_WORD;
5073 offset = insn_get(s, ot);
5074 selector = insn_get(s, OT_WORD);
5075
5076 gen_op_movl_T0_im(selector);
5077 gen_op_movl_T1_imu(offset);
5078 }
5079 goto do_ljmp;
5080 case 0xeb: /* jmp Jb */
5081 tval = (int8_t)insn_get(s, OT_BYTE);
5082 tval += s->pc - s->cs_base;
5083 if (s->dflag == 0)
5084 tval &= 0xffff;
5085 gen_jmp(s, tval);
5086 break;
5087 case 0x70 ... 0x7f: /* jcc Jb */
5088 tval = (int8_t)insn_get(s, OT_BYTE);
5089 goto do_jcc;
5090 case 0x180 ... 0x18f: /* jcc Jv */
5091 if (dflag) {
5092 tval = (int32_t)insn_get(s, OT_LONG);
5093 } else {
5094 tval = (int16_t)insn_get(s, OT_WORD);
5095 }
5096 do_jcc:
5097 next_eip = s->pc - s->cs_base;
5098 tval += next_eip;
5099 if (s->dflag == 0)
5100 tval &= 0xffff;
5101 gen_jcc(s, b, tval, next_eip);
5102 break;
5103
5104 case 0x190 ... 0x19f: /* setcc Gv */
5105 modrm = ldub_code(s->pc++);
5106 gen_setcc(s, b);
5107 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5108 break;
5109 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5110 ot = dflag + OT_WORD;
5111 modrm = ldub_code(s->pc++);
5112 reg = ((modrm >> 3) & 7) | rex_r;
5113 mod = (modrm >> 6) & 3;
5114 gen_setcc(s, b);
5115 if (mod != 3) {
5116 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5117 gen_op_ld_T1_A0[ot + s->mem_index]();
5118 } else {
5119 rm = (modrm & 7) | REX_B(s);
5120 gen_op_mov_TN_reg[ot][1][rm]();
5121 }
5122 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5123 break;
5124
5125 /************************/
5126 /* flags */
5127 case 0x9c: /* pushf */
5128 if (s->vm86 && s->iopl != 3) {
5129 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5130 } else {
5131 if (s->cc_op != CC_OP_DYNAMIC)
5132 gen_op_set_cc_op(s->cc_op);
5133 gen_op_movl_T0_eflags();
5134 gen_push_T0(s);
5135 }
5136 break;
5137 case 0x9d: /* popf */
5138 if (s->vm86 && s->iopl != 3) {
5139 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5140 } else {
5141 gen_pop_T0(s);
5142 if (s->cpl == 0) {
5143 if (s->dflag) {
5144 gen_op_movl_eflags_T0_cpl0();
5145 } else {
5146 gen_op_movw_eflags_T0_cpl0();
5147 }
5148 } else {
5149 if (s->cpl <= s->iopl) {
5150 if (s->dflag) {
5151 gen_op_movl_eflags_T0_io();
5152 } else {
5153 gen_op_movw_eflags_T0_io();
5154 }
5155 } else {
5156 if (s->dflag) {
5157 gen_op_movl_eflags_T0();
5158 } else {
5159 gen_op_movw_eflags_T0();
5160 }
5161 }
5162 }
5163 gen_pop_update(s);
5164 s->cc_op = CC_OP_EFLAGS;
5165 /* abort translation because TF flag may change */
5166 gen_jmp_im(s->pc - s->cs_base);
5167 gen_eob(s);
5168 }
5169 break;
5170 case 0x9e: /* sahf */
5171 if (CODE64(s))
5172 goto illegal_op;
5173 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5174 if (s->cc_op != CC_OP_DYNAMIC)
5175 gen_op_set_cc_op(s->cc_op);
5176 gen_op_movb_eflags_T0();
5177 s->cc_op = CC_OP_EFLAGS;
5178 break;
5179 case 0x9f: /* lahf */
5180 if (CODE64(s))
5181 goto illegal_op;
5182 if (s->cc_op != CC_OP_DYNAMIC)
5183 gen_op_set_cc_op(s->cc_op);
5184 gen_op_movl_T0_eflags();
5185 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5186 break;
5187 case 0xf5: /* cmc */
5188 if (s->cc_op != CC_OP_DYNAMIC)
5189 gen_op_set_cc_op(s->cc_op);
5190 gen_op_cmc();
5191 s->cc_op = CC_OP_EFLAGS;
5192 break;
5193 case 0xf8: /* clc */
5194 if (s->cc_op != CC_OP_DYNAMIC)
5195 gen_op_set_cc_op(s->cc_op);
5196 gen_op_clc();
5197 s->cc_op = CC_OP_EFLAGS;
5198 break;
5199 case 0xf9: /* stc */
5200 if (s->cc_op != CC_OP_DYNAMIC)
5201 gen_op_set_cc_op(s->cc_op);
5202 gen_op_stc();
5203 s->cc_op = CC_OP_EFLAGS;
5204 break;
5205 case 0xfc: /* cld */
5206 gen_op_cld();
5207 break;
5208 case 0xfd: /* std */
5209 gen_op_std();
5210 break;
5211
5212 /************************/
5213 /* bit operations */
5214 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5215 ot = dflag + OT_WORD;
5216 modrm = ldub_code(s->pc++);
5217 op = (modrm >> 3) & 7;
5218 mod = (modrm >> 6) & 3;
5219 rm = (modrm & 7) | REX_B(s);
5220 if (mod != 3) {
5221 s->rip_offset = 1;
5222 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5223 gen_op_ld_T0_A0[ot + s->mem_index]();
5224 } else {
5225 gen_op_mov_TN_reg[ot][0][rm]();
5226 }
5227 /* load shift */
5228 val = ldub_code(s->pc++);
5229 gen_op_movl_T1_im(val);
5230 if (op < 4)
5231 goto illegal_op;
5232 op -= 4;
5233 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5234 s->cc_op = CC_OP_SARB + ot;
5235 if (op != 0) {
5236 if (mod != 3)
5237 gen_op_st_T0_A0[ot + s->mem_index]();
5238 else
5239 gen_op_mov_reg_T0[ot][rm]();
5240 gen_op_update_bt_cc();
5241 }
5242 break;
5243 case 0x1a3: /* bt Gv, Ev */
5244 op = 0;
5245 goto do_btx;
5246 case 0x1ab: /* bts */
5247 op = 1;
5248 goto do_btx;
5249 case 0x1b3: /* btr */
5250 op = 2;
5251 goto do_btx;
5252 case 0x1bb: /* btc */
5253 op = 3;
5254 do_btx:
5255 ot = dflag + OT_WORD;
5256 modrm = ldub_code(s->pc++);
5257 reg = ((modrm >> 3) & 7) | rex_r;
5258 mod = (modrm >> 6) & 3;
5259 rm = (modrm & 7) | REX_B(s);
5260 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5261 if (mod != 3) {
5262 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5263 /* specific case: we need to add a displacement */
5264 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5265 gen_op_ld_T0_A0[ot + s->mem_index]();
5266 } else {
5267 gen_op_mov_TN_reg[ot][0][rm]();
5268 }
5269 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5270 s->cc_op = CC_OP_SARB + ot;
5271 if (op != 0) {
5272 if (mod != 3)
5273 gen_op_st_T0_A0[ot + s->mem_index]();
5274 else
5275 gen_op_mov_reg_T0[ot][rm]();
5276 gen_op_update_bt_cc();
5277 }
5278 break;
5279 case 0x1bc: /* bsf */
5280 case 0x1bd: /* bsr */
5281 ot = dflag + OT_WORD;
5282 modrm = ldub_code(s->pc++);
5283 reg = ((modrm >> 3) & 7) | rex_r;
5284 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5285 /* NOTE: in order to handle the 0 case, we must load the
5286 result. It could be optimized with a generated jump */
5287 gen_op_mov_TN_reg[ot][1][reg]();
5288 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5289 gen_op_mov_reg_T1[ot][reg]();
5290 s->cc_op = CC_OP_LOGICB + ot;
5291 break;
5292 /************************/
5293 /* bcd */
5294 case 0x27: /* daa */
5295 if (CODE64(s))
5296 goto illegal_op;
5297 if (s->cc_op != CC_OP_DYNAMIC)
5298 gen_op_set_cc_op(s->cc_op);
5299 gen_op_daa();
5300 s->cc_op = CC_OP_EFLAGS;
5301 break;
5302 case 0x2f: /* das */
5303 if (CODE64(s))
5304 goto illegal_op;
5305 if (s->cc_op != CC_OP_DYNAMIC)
5306 gen_op_set_cc_op(s->cc_op);
5307 gen_op_das();
5308 s->cc_op = CC_OP_EFLAGS;
5309 break;
5310 case 0x37: /* aaa */
5311 if (CODE64(s))
5312 goto illegal_op;
5313 if (s->cc_op != CC_OP_DYNAMIC)
5314 gen_op_set_cc_op(s->cc_op);
5315 gen_op_aaa();
5316 s->cc_op = CC_OP_EFLAGS;
5317 break;
5318 case 0x3f: /* aas */
5319 if (CODE64(s))
5320 goto illegal_op;
5321 if (s->cc_op != CC_OP_DYNAMIC)
5322 gen_op_set_cc_op(s->cc_op);
5323 gen_op_aas();
5324 s->cc_op = CC_OP_EFLAGS;
5325 break;
5326 case 0xd4: /* aam */
5327 if (CODE64(s))
5328 goto illegal_op;
5329 val = ldub_code(s->pc++);
5330 if (val == 0) {
5331 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5332 } else {
5333 gen_op_aam(val);
5334 s->cc_op = CC_OP_LOGICB;
5335 }
5336 break;
5337 case 0xd5: /* aad */
5338 if (CODE64(s))
5339 goto illegal_op;
5340 val = ldub_code(s->pc++);
5341 gen_op_aad(val);
5342 s->cc_op = CC_OP_LOGICB;
5343 break;
5344 /************************/
5345 /* misc */
5346 case 0x90: /* nop */
5347 /* XXX: xchg + rex handling */
5348 /* XXX: correct lock test for all insn */
5349 if (prefixes & PREFIX_LOCK)
5350 goto illegal_op;
5351 break;
5352 case 0x9b: /* fwait */
5353 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5354 (HF_MP_MASK | HF_TS_MASK)) {
5355 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5356 } else {
5357 if (s->cc_op != CC_OP_DYNAMIC)
5358 gen_op_set_cc_op(s->cc_op);
5359 gen_jmp_im(pc_start - s->cs_base);
5360 gen_op_fwait();
5361 }
5362 break;
5363 case 0xcc: /* int3 */
5364 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5365 break;
5366 case 0xcd: /* int N */
5367 val = ldub_code(s->pc++);
5368 if (s->vm86 && s->iopl != 3) {
5369 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5370 } else {
5371 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5372 }
5373 break;
5374 case 0xce: /* into */
5375 if (CODE64(s))
5376 goto illegal_op;
5377 if (s->cc_op != CC_OP_DYNAMIC)
5378 gen_op_set_cc_op(s->cc_op);
5379 gen_jmp_im(pc_start - s->cs_base);
5380 gen_op_into(s->pc - pc_start);
5381 break;
5382 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5383 #if 1
5384 gen_debug(s, pc_start - s->cs_base);
5385 #else
5386 /* start debug */
5387 tb_flush(cpu_single_env);
5388 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5389 #endif
5390 break;
5391 case 0xfa: /* cli */
5392 if (!s->vm86) {
5393 if (s->cpl <= s->iopl) {
5394 gen_op_cli();
5395 } else {
5396 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5397 }
5398 } else {
5399 if (s->iopl == 3) {
5400 gen_op_cli();
5401 } else {
5402 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5403 }
5404 }
5405 break;
5406 case 0xfb: /* sti */
5407 if (!s->vm86) {
5408 if (s->cpl <= s->iopl) {
5409 gen_sti:
5410 gen_op_sti();
5411 /* interruptions are enabled only the first insn after sti */
5412 /* If several instructions disable interrupts, only the
5413 _first_ does it */
5414 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5415 gen_op_set_inhibit_irq();
5416 /* give a chance to handle pending irqs */
5417 gen_jmp_im(s->pc - s->cs_base);
5418 gen_eob(s);
5419 } else {
5420 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5421 }
5422 } else {
5423 if (s->iopl == 3) {
5424 goto gen_sti;
5425 } else {
5426 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5427 }
5428 }
5429 break;
5430 case 0x62: /* bound */
5431 if (CODE64(s))
5432 goto illegal_op;
5433 ot = dflag ? OT_LONG : OT_WORD;
5434 modrm = ldub_code(s->pc++);
5435 reg = (modrm >> 3) & 7;
5436 mod = (modrm >> 6) & 3;
5437 if (mod == 3)
5438 goto illegal_op;
5439 gen_op_mov_TN_reg[ot][0][reg]();
5440 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5441 gen_jmp_im(pc_start - s->cs_base);
5442 if (ot == OT_WORD)
5443 gen_op_boundw();
5444 else
5445 gen_op_boundl();
5446 break;
5447 case 0x1c8 ... 0x1cf: /* bswap reg */
5448 reg = (b & 7) | REX_B(s);
5449 #ifdef TARGET_X86_64
5450 if (dflag == 2) {
5451 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5452 gen_op_bswapq_T0();
5453 gen_op_mov_reg_T0[OT_QUAD][reg]();
5454 } else
5455 #endif
5456 {
5457 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5458 gen_op_bswapl_T0();
5459 gen_op_mov_reg_T0[OT_LONG][reg]();
5460 }
5461 break;
5462 case 0xd6: /* salc */
5463 if (CODE64(s))
5464 goto illegal_op;
5465 if (s->cc_op != CC_OP_DYNAMIC)
5466 gen_op_set_cc_op(s->cc_op);
5467 gen_op_salc();
5468 break;
5469 case 0xe0: /* loopnz */
5470 case 0xe1: /* loopz */
5471 if (s->cc_op != CC_OP_DYNAMIC)
5472 gen_op_set_cc_op(s->cc_op);
5473 /* FALL THRU */
5474 case 0xe2: /* loop */
5475 case 0xe3: /* jecxz */
5476 {
5477 int l1, l2;
5478
5479 tval = (int8_t)insn_get(s, OT_BYTE);
5480 next_eip = s->pc - s->cs_base;
5481 tval += next_eip;
5482 if (s->dflag == 0)
5483 tval &= 0xffff;
5484
5485 l1 = gen_new_label();
5486 l2 = gen_new_label();
5487 b &= 3;
5488 if (b == 3) {
5489 gen_op_jz_ecx[s->aflag](l1);
5490 } else {
5491 gen_op_dec_ECX[s->aflag]();
5492 if (b <= 1)
5493 gen_op_mov_T0_cc();
5494 gen_op_loop[s->aflag][b](l1);
5495 }
5496
5497 gen_jmp_im(next_eip);
5498 gen_op_jmp_label(l2);
5499 gen_set_label(l1);
5500 gen_jmp_im(tval);
5501 gen_set_label(l2);
5502 gen_eob(s);
5503 }
5504 break;
5505 case 0x130: /* wrmsr */
5506 case 0x132: /* rdmsr */
5507 if (s->cpl != 0) {
5508 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5509 } else {
5510 if (b & 2)
5511 gen_op_rdmsr();
5512 else
5513 gen_op_wrmsr();
5514 }
5515 break;
5516 case 0x131: /* rdtsc */
5517 gen_jmp_im(pc_start - s->cs_base);
5518 gen_op_rdtsc();
5519 break;
5520 case 0x134: /* sysenter */
5521 if (CODE64(s))
5522 goto illegal_op;
5523 if (!s->pe) {
5524 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5525 } else {
5526 if (s->cc_op != CC_OP_DYNAMIC) {
5527 gen_op_set_cc_op(s->cc_op);
5528 s->cc_op = CC_OP_DYNAMIC;
5529 }
5530 gen_jmp_im(pc_start - s->cs_base);
5531 gen_op_sysenter();
5532 gen_eob(s);
5533 }
5534 break;
5535 case 0x135: /* sysexit */
5536 if (CODE64(s))
5537 goto illegal_op;
5538 if (!s->pe) {
5539 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5540 } else {
5541 if (s->cc_op != CC_OP_DYNAMIC) {
5542 gen_op_set_cc_op(s->cc_op);
5543 s->cc_op = CC_OP_DYNAMIC;
5544 }
5545 gen_jmp_im(pc_start - s->cs_base);
5546 gen_op_sysexit();
5547 gen_eob(s);
5548 }
5549 break;
5550 #ifdef TARGET_X86_64
5551 case 0x105: /* syscall */
5552 /* XXX: is it usable in real mode ? */
5553 if (s->cc_op != CC_OP_DYNAMIC) {
5554 gen_op_set_cc_op(s->cc_op);
5555 s->cc_op = CC_OP_DYNAMIC;
5556 }
5557 gen_jmp_im(pc_start - s->cs_base);
5558 gen_op_syscall(s->pc - pc_start);
5559 gen_eob(s);
5560 break;
5561 case 0x107: /* sysret */
5562 if (!s->pe) {
5563 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5564 } else {
5565 if (s->cc_op != CC_OP_DYNAMIC) {
5566 gen_op_set_cc_op(s->cc_op);
5567 s->cc_op = CC_OP_DYNAMIC;
5568 }
5569 gen_jmp_im(pc_start - s->cs_base);
5570 gen_op_sysret(s->dflag);
5571 /* condition codes are modified only in long mode */
5572 if (s->lma)
5573 s->cc_op = CC_OP_EFLAGS;
5574 gen_eob(s);
5575 }
5576 break;
5577 #endif
5578 case 0x1a2: /* cpuid */
5579 gen_op_cpuid();
5580 break;
5581 case 0xf4: /* hlt */
5582 if (s->cpl != 0) {
5583 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5584 } else {
5585 if (s->cc_op != CC_OP_DYNAMIC)
5586 gen_op_set_cc_op(s->cc_op);
5587 gen_jmp_im(s->pc - s->cs_base);
5588 gen_op_hlt();
5589 s->is_jmp = 3;
5590 }
5591 break;
5592 case 0x100:
5593 modrm = ldub_code(s->pc++);
5594 mod = (modrm >> 6) & 3;
5595 op = (modrm >> 3) & 7;
5596 switch(op) {
5597 case 0: /* sldt */
5598 if (!s->pe || s->vm86)
5599 goto illegal_op;
5600 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5601 ot = OT_WORD;
5602 if (mod == 3)
5603 ot += s->dflag;
5604 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5605 break;
5606 case 2: /* lldt */
5607 if (!s->pe || s->vm86)
5608 goto illegal_op;
5609 if (s->cpl != 0) {
5610 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5611 } else {
5612 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5613 gen_jmp_im(pc_start - s->cs_base);
5614 gen_op_lldt_T0();
5615 }
5616 break;
5617 case 1: /* str */
5618 if (!s->pe || s->vm86)
5619 goto illegal_op;
5620 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5621 ot = OT_WORD;
5622 if (mod == 3)
5623 ot += s->dflag;
5624 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5625 break;
5626 case 3: /* ltr */
5627 if (!s->pe || s->vm86)
5628 goto illegal_op;
5629 if (s->cpl != 0) {
5630 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5631 } else {
5632 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5633 gen_jmp_im(pc_start - s->cs_base);
5634 gen_op_ltr_T0();
5635 }
5636 break;
5637 case 4: /* verr */
5638 case 5: /* verw */
5639 if (!s->pe || s->vm86)
5640 goto illegal_op;
5641 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5642 if (s->cc_op != CC_OP_DYNAMIC)
5643 gen_op_set_cc_op(s->cc_op);
5644 if (op == 4)
5645 gen_op_verr();
5646 else
5647 gen_op_verw();
5648 s->cc_op = CC_OP_EFLAGS;
5649 break;
5650 default:
5651 goto illegal_op;
5652 }
5653 break;
5654 case 0x101:
5655 modrm = ldub_code(s->pc++);
5656 mod = (modrm >> 6) & 3;
5657 op = (modrm >> 3) & 7;
5658 rm = modrm & 7;
5659 switch(op) {
5660 case 0: /* sgdt */
5661 if (mod == 3)
5662 goto illegal_op;
5663 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5664 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5665 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5666 gen_add_A0_im(s, 2);
5667 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5668 if (!s->dflag)
5669 gen_op_andl_T0_im(0xffffff);
5670 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5671 break;
5672 case 1:
5673 if (mod == 3) {
5674 switch (rm) {
5675 case 0: /* monitor */
5676 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5677 s->cpl != 0)
5678 goto illegal_op;
5679 gen_jmp_im(pc_start - s->cs_base);
5680 #ifdef TARGET_X86_64
5681 if (s->aflag == 2) {
5682 gen_op_movq_A0_reg[R_EBX]();
5683 gen_op_addq_A0_AL();
5684 } else
5685 #endif
5686 {
5687 gen_op_movl_A0_reg[R_EBX]();
5688 gen_op_addl_A0_AL();
5689 if (s->aflag == 0)
5690 gen_op_andl_A0_ffff();
5691 }
5692 gen_add_A0_ds_seg(s);
5693 gen_op_monitor();
5694 break;
5695 case 1: /* mwait */
5696 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5697 s->cpl != 0)
5698 goto illegal_op;
5699 if (s->cc_op != CC_OP_DYNAMIC) {
5700 gen_op_set_cc_op(s->cc_op);
5701 s->cc_op = CC_OP_DYNAMIC;
5702 }
5703 gen_jmp_im(s->pc - s->cs_base);
5704 gen_op_mwait();
5705 gen_eob(s);
5706 break;
5707 default:
5708 goto illegal_op;
5709 }
5710 } else { /* sidt */
5711 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5712 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5713 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5714 gen_add_A0_im(s, 2);
5715 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5716 if (!s->dflag)
5717 gen_op_andl_T0_im(0xffffff);
5718 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5719 }
5720 break;
5721 case 2: /* lgdt */
5722 case 3: /* lidt */
5723 if (mod == 3)
5724 goto illegal_op;
5725 if (s->cpl != 0) {
5726 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5727 } else {
5728 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5729 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5730 gen_add_A0_im(s, 2);
5731 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5732 if (!s->dflag)
5733 gen_op_andl_T0_im(0xffffff);
5734 if (op == 2) {
5735 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5736 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5737 } else {
5738 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5739 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5740 }
5741 }
5742 break;
5743 case 4: /* smsw */
5744 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5745 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5746 break;
5747 case 6: /* lmsw */
5748 if (s->cpl != 0) {
5749 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5750 } else {
5751 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5752 gen_op_lmsw_T0();
5753 gen_jmp_im(s->pc - s->cs_base);
5754 gen_eob(s);
5755 }
5756 break;
5757 case 7: /* invlpg */
5758 if (s->cpl != 0) {
5759 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5760 } else {
5761 if (mod == 3) {
5762 #ifdef TARGET_X86_64
5763 if (CODE64(s) && rm == 0) {
5764 /* swapgs */
5765 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5766 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5767 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5768 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5769 } else
5770 #endif
5771 {
5772 goto illegal_op;
5773 }
5774 } else {
5775 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5776 gen_op_invlpg_A0();
5777 gen_jmp_im(s->pc - s->cs_base);
5778 gen_eob(s);
5779 }
5780 }
5781 break;
5782 default:
5783 goto illegal_op;
5784 }
5785 break;
5786 case 0x108: /* invd */
5787 case 0x109: /* wbinvd */
5788 if (s->cpl != 0) {
5789 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5790 } else {
5791 /* nothing to do */
5792 }
5793 break;
5794 case 0x63: /* arpl or movslS (x86_64) */
5795 #ifdef TARGET_X86_64
5796 if (CODE64(s)) {
5797 int d_ot;
5798 /* d_ot is the size of destination */
5799 d_ot = dflag + OT_WORD;
5800
5801 modrm = ldub_code(s->pc++);
5802 reg = ((modrm >> 3) & 7) | rex_r;
5803 mod = (modrm >> 6) & 3;
5804 rm = (modrm & 7) | REX_B(s);
5805
5806 if (mod == 3) {
5807 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5808 /* sign extend */
5809 if (d_ot == OT_QUAD)
5810 gen_op_movslq_T0_T0();
5811 gen_op_mov_reg_T0[d_ot][reg]();
5812 } else {
5813 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5814 if (d_ot == OT_QUAD) {
5815 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5816 } else {
5817 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5818 }
5819 gen_op_mov_reg_T0[d_ot][reg]();
5820 }
5821 } else
5822 #endif
5823 {
5824 if (!s->pe || s->vm86)
5825 goto illegal_op;
5826 ot = dflag ? OT_LONG : OT_WORD;
5827 modrm = ldub_code(s->pc++);
5828 reg = (modrm >> 3) & 7;
5829 mod = (modrm >> 6) & 3;
5830 rm = modrm & 7;
5831 if (mod != 3) {
5832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5833 gen_op_ld_T0_A0[ot + s->mem_index]();
5834 } else {
5835 gen_op_mov_TN_reg[ot][0][rm]();
5836 }
5837 if (s->cc_op != CC_OP_DYNAMIC)
5838 gen_op_set_cc_op(s->cc_op);
5839 gen_op_arpl();
5840 s->cc_op = CC_OP_EFLAGS;
5841 if (mod != 3) {
5842 gen_op_st_T0_A0[ot + s->mem_index]();
5843 } else {
5844 gen_op_mov_reg_T0[ot][rm]();
5845 }
5846 gen_op_arpl_update();
5847 }
5848 break;
5849 case 0x102: /* lar */
5850 case 0x103: /* lsl */
5851 if (!s->pe || s->vm86)
5852 goto illegal_op;
5853 ot = dflag ? OT_LONG : OT_WORD;
5854 modrm = ldub_code(s->pc++);
5855 reg = ((modrm >> 3) & 7) | rex_r;
5856 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5857 gen_op_mov_TN_reg[ot][1][reg]();
5858 if (s->cc_op != CC_OP_DYNAMIC)
5859 gen_op_set_cc_op(s->cc_op);
5860 if (b == 0x102)
5861 gen_op_lar();
5862 else
5863 gen_op_lsl();
5864 s->cc_op = CC_OP_EFLAGS;
5865 gen_op_mov_reg_T1[ot][reg]();
5866 break;
5867 case 0x118:
5868 modrm = ldub_code(s->pc++);
5869 mod = (modrm >> 6) & 3;
5870 op = (modrm >> 3) & 7;
5871 switch(op) {
5872 case 0: /* prefetchnta */
5873 case 1: /* prefetchnt0 */
5874 case 2: /* prefetchnt0 */
5875 case 3: /* prefetchnt0 */
5876 if (mod == 3)
5877 goto illegal_op;
5878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5879 /* nothing more to do */
5880 break;
5881 default: /* nop (multi byte) */
5882 gen_nop_modrm(s, modrm);
5883 break;
5884 }
5885 break;
5886 case 0x119 ... 0x11f: /* nop (multi byte) */
5887 modrm = ldub_code(s->pc++);
5888 gen_nop_modrm(s, modrm);
5889 break;
5890 case 0x120: /* mov reg, crN */
5891 case 0x122: /* mov crN, reg */
5892 if (s->cpl != 0) {
5893 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5894 } else {
5895 modrm = ldub_code(s->pc++);
5896 if ((modrm & 0xc0) != 0xc0)
5897 goto illegal_op;
5898 rm = (modrm & 7) | REX_B(s);
5899 reg = ((modrm >> 3) & 7) | rex_r;
5900 if (CODE64(s))
5901 ot = OT_QUAD;
5902 else
5903 ot = OT_LONG;
5904 switch(reg) {
5905 case 0:
5906 case 2:
5907 case 3:
5908 case 4:
5909 case 8:
5910 if (b & 2) {
5911 gen_op_mov_TN_reg[ot][0][rm]();
5912 gen_op_movl_crN_T0(reg);
5913 gen_jmp_im(s->pc - s->cs_base);
5914 gen_eob(s);
5915 } else {
5916 #if !defined(CONFIG_USER_ONLY)
5917 if (reg == 8)
5918 gen_op_movtl_T0_cr8();
5919 else
5920 #endif
5921 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5922 gen_op_mov_reg_T0[ot][rm]();
5923 }
5924 break;
5925 default:
5926 goto illegal_op;
5927 }
5928 }
5929 break;
5930 case 0x121: /* mov reg, drN */
5931 case 0x123: /* mov drN, reg */
5932 if (s->cpl != 0) {
5933 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5934 } else {
5935 modrm = ldub_code(s->pc++);
5936 if ((modrm & 0xc0) != 0xc0)
5937 goto illegal_op;
5938 rm = (modrm & 7) | REX_B(s);
5939 reg = ((modrm >> 3) & 7) | rex_r;
5940 if (CODE64(s))
5941 ot = OT_QUAD;
5942 else
5943 ot = OT_LONG;
5944 /* XXX: do it dynamically with CR4.DE bit */
5945 if (reg == 4 || reg == 5 || reg >= 8)
5946 goto illegal_op;
5947 if (b & 2) {
5948 gen_op_mov_TN_reg[ot][0][rm]();
5949 gen_op_movl_drN_T0(reg);
5950 gen_jmp_im(s->pc - s->cs_base);
5951 gen_eob(s);
5952 } else {
5953 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5954 gen_op_mov_reg_T0[ot][rm]();
5955 }
5956 }
5957 break;
5958 case 0x106: /* clts */
5959 if (s->cpl != 0) {
5960 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5961 } else {
5962 gen_op_clts();
5963 /* abort block because static cpu state changed */
5964 gen_jmp_im(s->pc - s->cs_base);
5965 gen_eob(s);
5966 }
5967 break;
5968 /* MMX/SSE/SSE2/PNI support */
5969 case 0x1c3: /* MOVNTI reg, mem */
5970 if (!(s->cpuid_features & CPUID_SSE2))
5971 goto illegal_op;
5972 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5973 modrm = ldub_code(s->pc++);
5974 mod = (modrm >> 6) & 3;
5975 if (mod == 3)
5976 goto illegal_op;
5977 reg = ((modrm >> 3) & 7) | rex_r;
5978 /* generate a generic store */
5979 gen_ldst_modrm(s, modrm, ot, reg, 1);
5980 break;
5981 case 0x1ae:
5982 modrm = ldub_code(s->pc++);
5983 mod = (modrm >> 6) & 3;
5984 op = (modrm >> 3) & 7;
5985 switch(op) {
5986 case 0: /* fxsave */
5987 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5988 (s->flags & HF_EM_MASK))
5989 goto illegal_op;
5990 if (s->flags & HF_TS_MASK) {
5991 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5992 break;
5993 }
5994 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5995 gen_op_fxsave_A0((s->dflag == 2));
5996 break;
5997 case 1: /* fxrstor */
5998 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5999 (s->flags & HF_EM_MASK))
6000 goto illegal_op;
6001 if (s->flags & HF_TS_MASK) {
6002 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6003 break;
6004 }
6005 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6006 gen_op_fxrstor_A0((s->dflag == 2));
6007 break;
6008 case 2: /* ldmxcsr */
6009 case 3: /* stmxcsr */
6010 if (s->flags & HF_TS_MASK) {
6011 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6012 break;
6013 }
6014 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6015 mod == 3)
6016 goto illegal_op;
6017 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6018 if (op == 2) {
6019 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6020 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6021 } else {
6022 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6023 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6024 }
6025 break;
6026 case 5: /* lfence */
6027 case 6: /* mfence */
6028 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6029 goto illegal_op;
6030 break;
6031 case 7: /* sfence / clflush */
6032 if ((modrm & 0xc7) == 0xc0) {
6033 /* sfence */
6034 if (!(s->cpuid_features & CPUID_SSE))
6035 goto illegal_op;
6036 } else {
6037 /* clflush */
6038 if (!(s->cpuid_features & CPUID_CLFLUSH))
6039 goto illegal_op;
6040 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6041 }
6042 break;
6043 default:
6044 goto illegal_op;
6045 }
6046 break;
6047 case 0x10d: /* prefetch */
6048 modrm = ldub_code(s->pc++);
6049 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6050 /* ignore for now */
6051 break;
6052 case 0x1aa: /* rsm */
6053 if (!(s->flags & HF_SMM_MASK))
6054 goto illegal_op;
6055 if (s->cc_op != CC_OP_DYNAMIC) {
6056 gen_op_set_cc_op(s->cc_op);
6057 s->cc_op = CC_OP_DYNAMIC;
6058 }
6059 gen_jmp_im(s->pc - s->cs_base);
6060 gen_op_rsm();
6061 gen_eob(s);
6062 break;
6063 case 0x110 ... 0x117:
6064 case 0x128 ... 0x12f:
6065 case 0x150 ... 0x177:
6066 case 0x17c ... 0x17f:
6067 case 0x1c2:
6068 case 0x1c4 ... 0x1c6:
6069 case 0x1d0 ... 0x1fe:
6070 gen_sse(s, b, pc_start, rex_r);
6071 break;
6072 default:
6073 goto illegal_op;
6074 }
6075 /* lock generation */
6076 if (s->prefix & PREFIX_LOCK)
6077 gen_op_unlock();
6078 return s->pc;
6079 illegal_op:
6080 if (s->prefix & PREFIX_LOCK)
6081 gen_op_unlock();
6082 /* XXX: ensure that no lock was generated */
6083 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6084 return s->pc;
6085 }
6086
/* Convenience masks over the individual CC_* flag bits: all six
   arithmetic status flags, and the same set minus carry.  Used below
   when building the per-op flag read/write tables. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
/* Per-micro-op table: for each generated op index, the mask of EFLAGS
   bits (CC_*) that the op CONSUMES.  Indexed by INDEX_op_* and consulted
   together with opc_write_flags below to decide when a pending lazy
   cc_op computation can be skipped. */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops read the auxiliary (and for das/daa also carry) flag. */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* Conditional-jump ops on sub results, per operand width (b/w/l). */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz test ZF in addition to (E)CX. */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc ops reading from the generic cc state. */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc ops specialized on sub results, per operand width. */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* pushf/lahf-style reads of the whole flag set; cmc/salc read CF. */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* Quad-word (64-bit) variants of the entries above. */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr all consume the incoming carry; this macro emits
   those entries once per memory-access variant (SUFFIX selects the
   raw/kernel/user access flavour of the op names). */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6222
/* flags written by an operation */
/* Per-micro-op bitmask (CC_*) of the EFLAGS bits each op writes.
   optimize_flags() uses this together with opc_read_flags[] and
   opc_simpler[]: when none of the flags an op writes are live
   afterwards, the op is replaced by its flag-less variant.
   Ops not listed here write no flags (array default is 0). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies clobber the whole flag set */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* EFLAGS loads: the byte form (SAHF-style) cannot touch O */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* ops that only produce Z (segment checks) or the FP compares */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Memory-access op variants: the same table entries are stamped out
   for each access-mode suffix (default, _raw, _kernel, _user). */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6373
/* simpler form of an operation if no flags need to be generated */
/* Maps a flag-producing micro-op to its flag-less equivalent.
   optimize_flags_init() fills the unlisted entries with the identity
   mapping, so every op has a (possibly unchanged) "simpler" form. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops disappear entirely when flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate ops have one variant per memory-access-mode suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6417
6418 void optimize_flags_init(void)
6419 {
6420 int i;
6421 /* put default values in arrays */
6422 for(i = 0; i < NB_OPS; i++) {
6423 if (opc_simpler[i] == 0)
6424 opc_simpler[i] = i;
6425 }
6426 }
6427
6428 /* CPU flags computation optimization: we move backward thru the
6429 generated code to see which flags are needed. The operation is
6430 modified if suitable */
6431 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6432 {
6433 uint16_t *opc_ptr;
6434 int live_flags, write_flags, op;
6435
6436 opc_ptr = opc_buf + opc_buf_len;
6437 /* live_flags contains the flags needed by the next instructions
6438 in the code. At the end of the block, we consider that all the
6439 flags are live. */
6440 live_flags = CC_OSZAPC;
6441 while (opc_ptr > opc_buf) {
6442 op = *--opc_ptr;
6443 /* if none of the flags written by the instruction is used,
6444 then we can try to find a simpler instruction */
6445 write_flags = opc_write_flags[op];
6446 if ((live_flags & write_flags) == 0) {
6447 *opc_ptr = opc_simpler[op];
6448 }
6449 /* compute the live flags before the instruction */
6450 live_flags &= ~write_flags;
6451 live_flags |= opc_read_flags[op];
6452 }
6453 }
6454
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest PC / cc_op at an arbitrary point inside the block).
   Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;         /* guest PC of the next insn to decode */
    uint16_t *gen_opc_end;       /* hard limit of the opcode buffer */
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the CPU mode bits cached in tb->flags into the
       per-translation DisasContext */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;   /* flags state unknown at block entry */
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* NOTE(review): mem_index appears to be a byte offset into the
       per-privilege memory-op tables (stride 4); 0 = no softmmu,
       1*4 = kernel, 2*4 = user — confirm against the op tables. */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-insn exit is
       required (no TF single-step, no debugger single-step, no pending
       IRQ-inhibit window) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;                     /* index of the last recorded insn start */

    /* main decode loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug trap if a breakpoint is set at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record (guest PC, cc_op) for every op index; gaps between
               instruction starts are zero-filled */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        /* decode one instruction; returns the PC after it */
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    /* optional tracing of CPU state, guest asm and micro-ops */
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
        disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* tb->size is only meaningful on the generation pass, not when
       re-translating to search for a PC */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6616
6617 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6618 {
6619 return gen_intermediate_code_internal(env, tb, 0);
6620 }
6621
6622 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6623 {
6624 return gen_intermediate_code_internal(env, tb, 1);
6625 }
6626