Source listing: target-i386/translate.c from the qemu.git repository (git.proxmox.com mirror).
Commit subject: "loop insn fix for non x86 hosts".
Path: qemu.git / target-i386 / translate.c
1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31
32 /* XXX: move that elsewhere */
/* Output buffers for the code generator: gen_opc_ptr receives the stream of
   micro-op indices and gen_opparam_ptr their parameters.
   NOTE(review): presumably the dyngen intermediate representation — confirm
   against exec-all.h, which is not visible in this chunk. */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
35
/* Instruction prefix bits accumulated while decoding one x86 instruction
   (stored in DisasContext.prefix). */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
41
/* Helpers that compile away on 32-bit-only targets: X86_64_ONLY(x) yields
   the op on x86-64 builds and NULL otherwise (tables index into these and
   must keep their slots), CODE64/REX_X/REX_B read decoder state that only
   exists in 64-bit mode. */
42 #ifdef TARGET_X86_64
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49 #if 1
50 #define BUGGY_64(x) NULL
51 #endif
52 #else
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
55 #define CODE64(s) 0
56 #define REX_X(s) 0
57 #define REX_B(s) 0
58 #endif
59
/* Non-zero when a REX prefix makes SPL/BPL/SIL/DIL addressable instead of
   AH/CH/DH/BH; consulted by the *_wrapper byte-register ops below. */
60 #ifdef TARGET_X86_64
61 static int x86_64_hregs;
62 #endif
63
/* TB parameter passed to generated jump code; unused when direct jump
   patching is enabled. */
64 #ifdef USE_DIRECT_JUMP
65 #define TBPARAM(x)
66 #else
67 #define TBPARAM(x) (long)(x)
68 #endif
69
/* Per-translation-block decoder state. One DisasContext is live while a
   single translation block is being decoded and translated; the "current
   insn context" fields are reset per instruction, the "current block
   context" fields are fixed for the whole block. */
70 typedef struct DisasContext {
71 /* current insn context */
72 int override; /* -1 if no override */
73 int prefix;
74 int aflag, dflag;
75 target_ulong pc; /* pc = eip + cs_base */
76 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base; /* base of CS segment */
80 int pe; /* protected mode */
81 int code32; /* 32 bit code segment */
82 #ifdef TARGET_X86_64
83 int lma; /* long mode active */
84 int code64; /* 64 bit code segment */
85 int rex_x, rex_b;
86 #endif
87 int ss32; /* 32 bit stack segment */
88 int cc_op; /* current CC operation */
89 int addseg; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st; /* currently unused */
91 int vm86; /* vm86 mode */
92 int cpl;
93 int iopl;
94 int tf; /* TF cpu flag */
95 int singlestep_enabled; /* "hardware" single step enabled */
96 int jmp_opt; /* use direct block chaining for direct jumps */
97 int mem_index; /* select memory access functions */
98 int flags; /* all execution flags */
99 struct TranslationBlock *tb;
100 int popl_esp_hack; /* for correct popl with esp base handling */
101 int rip_offset; /* only used in x86_64, but left for simplicity */
102 int cpuid_features;
103 } DisasContext;
104
/* Forward declarations for the branch/end-of-block helpers used by the
   string-op and arithmetic generators below. */
105 static void gen_eob(DisasContext *s);
106 static void gen_jmp(DisasContext *s, target_ulong eip);
107 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
108
109 /* i386 arith/logic operations */
110 enum {
111 OP_ADDL,
112 OP_ORL,
113 OP_ADCL,
114 OP_SBBL,
115 OP_ANDL,
116 OP_SUBL,
117 OP_XORL,
118 OP_CMPL,
119 };
120
121 /* i386 shift ops */
122 enum {
123 OP_ROL,
124 OP_ROR,
125 OP_RCL,
126 OP_RCR,
127 OP_SHL,
128 OP_SHR,
129 OP_SHL1, /* undocumented */
130 OP_SAR = 7,
131 };
132
133 enum {
134 #define DEF(s, n, copy_size) INDEX_op_ ## s,
135 #include "opc.h"
136 #undef DEF
137 NB_OPS,
138 };
139
140 #include "gen-op.h"
141
142 /* operand size */
143 enum {
144 OT_BYTE = 0,
145 OT_WORD,
146 OT_LONG,
147 OT_QUAD,
148 };
149
150 enum {
151 /* I386 int registers */
152 OR_EAX, /* MUST be even numbered */
153 OR_ECX,
154 OR_EDX,
155 OR_EBX,
156 OR_ESP,
157 OR_EBP,
158 OR_ESI,
159 OR_EDI,
160
161 OR_TMP0 = 16, /* temporary operand register */
162 OR_TMP1,
163 OR_A0, /* temporary register used when doing address evaluation */
164 };
165
166 #ifdef TARGET_X86_64
167
168 #define NB_OP_SIZES 4
169
170 #define DEF_REGS(prefix, suffix) \
171 prefix ## EAX ## suffix,\
172 prefix ## ECX ## suffix,\
173 prefix ## EDX ## suffix,\
174 prefix ## EBX ## suffix,\
175 prefix ## ESP ## suffix,\
176 prefix ## EBP ## suffix,\
177 prefix ## ESI ## suffix,\
178 prefix ## EDI ## suffix,\
179 prefix ## R8 ## suffix,\
180 prefix ## R9 ## suffix,\
181 prefix ## R10 ## suffix,\
182 prefix ## R11 ## suffix,\
183 prefix ## R12 ## suffix,\
184 prefix ## R13 ## suffix,\
185 prefix ## R14 ## suffix,\
186 prefix ## R15 ## suffix,
187
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
189 \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
191 { \
192 if (x86_64_hregs) \
193 prefixb ## ESP ## suffix (); \
194 else \
195 prefixh ## EAX ## suffix (); \
196 } \
197 \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
199 { \
200 if (x86_64_hregs) \
201 prefixb ## EBP ## suffix (); \
202 else \
203 prefixh ## ECX ## suffix (); \
204 } \
205 \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
207 { \
208 if (x86_64_hregs) \
209 prefixb ## ESI ## suffix (); \
210 else \
211 prefixh ## EDX ## suffix (); \
212 } \
213 \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
215 { \
216 if (x86_64_hregs) \
217 prefixb ## EDI ## suffix (); \
218 else \
219 prefixh ## EBX ## suffix (); \
220 }
221
222 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
226
227 #else /* !TARGET_X86_64 */
228
229 #define NB_OP_SIZES 3
230
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,
240
241 #endif /* !TARGET_X86_64 */
242
243 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
244 [OT_BYTE] = {
245 gen_op_movb_EAX_T0,
246 gen_op_movb_ECX_T0,
247 gen_op_movb_EDX_T0,
248 gen_op_movb_EBX_T0,
249 #ifdef TARGET_X86_64
250 gen_op_movb_ESP_T0_wrapper,
251 gen_op_movb_EBP_T0_wrapper,
252 gen_op_movb_ESI_T0_wrapper,
253 gen_op_movb_EDI_T0_wrapper,
254 gen_op_movb_R8_T0,
255 gen_op_movb_R9_T0,
256 gen_op_movb_R10_T0,
257 gen_op_movb_R11_T0,
258 gen_op_movb_R12_T0,
259 gen_op_movb_R13_T0,
260 gen_op_movb_R14_T0,
261 gen_op_movb_R15_T0,
262 #else
263 gen_op_movh_EAX_T0,
264 gen_op_movh_ECX_T0,
265 gen_op_movh_EDX_T0,
266 gen_op_movh_EBX_T0,
267 #endif
268 },
269 [OT_WORD] = {
270 DEF_REGS(gen_op_movw_, _T0)
271 },
272 [OT_LONG] = {
273 DEF_REGS(gen_op_movl_, _T0)
274 },
275 #ifdef TARGET_X86_64
276 [OT_QUAD] = {
277 DEF_REGS(gen_op_movq_, _T0)
278 },
279 #endif
280 };
281
282 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
283 [OT_BYTE] = {
284 gen_op_movb_EAX_T1,
285 gen_op_movb_ECX_T1,
286 gen_op_movb_EDX_T1,
287 gen_op_movb_EBX_T1,
288 #ifdef TARGET_X86_64
289 gen_op_movb_ESP_T1_wrapper,
290 gen_op_movb_EBP_T1_wrapper,
291 gen_op_movb_ESI_T1_wrapper,
292 gen_op_movb_EDI_T1_wrapper,
293 gen_op_movb_R8_T1,
294 gen_op_movb_R9_T1,
295 gen_op_movb_R10_T1,
296 gen_op_movb_R11_T1,
297 gen_op_movb_R12_T1,
298 gen_op_movb_R13_T1,
299 gen_op_movb_R14_T1,
300 gen_op_movb_R15_T1,
301 #else
302 gen_op_movh_EAX_T1,
303 gen_op_movh_ECX_T1,
304 gen_op_movh_EDX_T1,
305 gen_op_movh_EBX_T1,
306 #endif
307 },
308 [OT_WORD] = {
309 DEF_REGS(gen_op_movw_, _T1)
310 },
311 [OT_LONG] = {
312 DEF_REGS(gen_op_movl_, _T1)
313 },
314 #ifdef TARGET_X86_64
315 [OT_QUAD] = {
316 DEF_REGS(gen_op_movq_, _T1)
317 },
318 #endif
319 };
320
321 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
322 [0] = {
323 DEF_REGS(gen_op_movw_, _A0)
324 },
325 [1] = {
326 DEF_REGS(gen_op_movl_, _A0)
327 },
328 #ifdef TARGET_X86_64
329 [2] = {
330 DEF_REGS(gen_op_movq_, _A0)
331 },
332 #endif
333 };
334
335 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
336 {
337 [OT_BYTE] = {
338 {
339 gen_op_movl_T0_EAX,
340 gen_op_movl_T0_ECX,
341 gen_op_movl_T0_EDX,
342 gen_op_movl_T0_EBX,
343 #ifdef TARGET_X86_64
344 gen_op_movl_T0_ESP_wrapper,
345 gen_op_movl_T0_EBP_wrapper,
346 gen_op_movl_T0_ESI_wrapper,
347 gen_op_movl_T0_EDI_wrapper,
348 gen_op_movl_T0_R8,
349 gen_op_movl_T0_R9,
350 gen_op_movl_T0_R10,
351 gen_op_movl_T0_R11,
352 gen_op_movl_T0_R12,
353 gen_op_movl_T0_R13,
354 gen_op_movl_T0_R14,
355 gen_op_movl_T0_R15,
356 #else
357 gen_op_movh_T0_EAX,
358 gen_op_movh_T0_ECX,
359 gen_op_movh_T0_EDX,
360 gen_op_movh_T0_EBX,
361 #endif
362 },
363 {
364 gen_op_movl_T1_EAX,
365 gen_op_movl_T1_ECX,
366 gen_op_movl_T1_EDX,
367 gen_op_movl_T1_EBX,
368 #ifdef TARGET_X86_64
369 gen_op_movl_T1_ESP_wrapper,
370 gen_op_movl_T1_EBP_wrapper,
371 gen_op_movl_T1_ESI_wrapper,
372 gen_op_movl_T1_EDI_wrapper,
373 gen_op_movl_T1_R8,
374 gen_op_movl_T1_R9,
375 gen_op_movl_T1_R10,
376 gen_op_movl_T1_R11,
377 gen_op_movl_T1_R12,
378 gen_op_movl_T1_R13,
379 gen_op_movl_T1_R14,
380 gen_op_movl_T1_R15,
381 #else
382 gen_op_movh_T1_EAX,
383 gen_op_movh_T1_ECX,
384 gen_op_movh_T1_EDX,
385 gen_op_movh_T1_EBX,
386 #endif
387 },
388 },
389 [OT_WORD] = {
390 {
391 DEF_REGS(gen_op_movl_T0_, )
392 },
393 {
394 DEF_REGS(gen_op_movl_T1_, )
395 },
396 },
397 [OT_LONG] = {
398 {
399 DEF_REGS(gen_op_movl_T0_, )
400 },
401 {
402 DEF_REGS(gen_op_movl_T1_, )
403 },
404 },
405 #ifdef TARGET_X86_64
406 [OT_QUAD] = {
407 {
408 DEF_REGS(gen_op_movl_T0_, )
409 },
410 {
411 DEF_REGS(gen_op_movl_T1_, )
412 },
413 },
414 #endif
415 };
416
417 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418 DEF_REGS(gen_op_movl_A0_, )
419 };
420
421 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
422 [0] = {
423 DEF_REGS(gen_op_addl_A0_, )
424 },
425 [1] = {
426 DEF_REGS(gen_op_addl_A0_, _s1)
427 },
428 [2] = {
429 DEF_REGS(gen_op_addl_A0_, _s2)
430 },
431 [3] = {
432 DEF_REGS(gen_op_addl_A0_, _s3)
433 },
434 };
435
436 #ifdef TARGET_X86_64
437 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438 DEF_REGS(gen_op_movq_A0_, )
439 };
440
441 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
442 [0] = {
443 DEF_REGS(gen_op_addq_A0_, )
444 },
445 [1] = {
446 DEF_REGS(gen_op_addq_A0_, _s1)
447 },
448 [2] = {
449 DEF_REGS(gen_op_addq_A0_, _s2)
450 },
451 [3] = {
452 DEF_REGS(gen_op_addq_A0_, _s3)
453 },
454 };
455 #endif
456
457 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
458 [0] = {
459 DEF_REGS(gen_op_cmovw_, _T1_T0)
460 },
461 [1] = {
462 DEF_REGS(gen_op_cmovl_, _T1_T0)
463 },
464 #ifdef TARGET_X86_64
465 [2] = {
466 DEF_REGS(gen_op_cmovq_, _T1_T0)
467 },
468 #endif
469 };
470
471 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
472 NULL,
473 gen_op_orl_T0_T1,
474 NULL,
475 NULL,
476 gen_op_andl_T0_T1,
477 NULL,
478 gen_op_xorl_T0_T1,
479 NULL,
480 };
481
482 #define DEF_ARITHC(SUFFIX)\
483 {\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
486 },\
487 {\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
490 },\
491 {\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
494 },\
495 {\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
498 },
499
500 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
501 DEF_ARITHC( )
502 };
503
504 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
505 DEF_ARITHC(_raw)
506 #ifndef CONFIG_USER_ONLY
507 DEF_ARITHC(_kernel)
508 DEF_ARITHC(_user)
509 #endif
510 };
511
/* Maps an OP_* arith index (OP_ADDL..OP_CMPL order) to the byte-sized
   CC_OP_* base value; callers add the operand-size `ot` to select the
   actual width. OR/AND/XOR map to CC_OP_LOGICB, CMP/SUB/SBB to CC_OP_SUBB.
   NOTE(review): the ADC/SBB slots look unused — gen_op() handles those two
   ops via the dynamic-cc path instead; confirm before relying on them. */
512 static const int cc_op_arithb[8] = {
513 CC_OP_ADDB,
514 CC_OP_LOGICB,
515 CC_OP_ADDB,
516 CC_OP_SUBB,
517 CC_OP_LOGICB,
518 CC_OP_SUBB,
519 CC_OP_LOGICB,
520 CC_OP_SUBB,
521 };
522
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
528
529 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
530 DEF_CMPXCHG( )
531 };
532
533 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
534 DEF_CMPXCHG(_raw)
535 #ifndef CONFIG_USER_ONLY
536 DEF_CMPXCHG(_kernel)
537 DEF_CMPXCHG(_user)
538 #endif
539 };
540
541 #define DEF_SHIFT(SUFFIX)\
542 {\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
551 },\
552 {\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
561 },\
562 {\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
571 },\
572 {\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
581 },
582
583 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
584 DEF_SHIFT( )
585 };
586
587 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
588 DEF_SHIFT(_raw)
589 #ifndef CONFIG_USER_ONLY
590 DEF_SHIFT(_kernel)
591 DEF_SHIFT(_user)
592 #endif
593 };
594
595 #define DEF_SHIFTD(SUFFIX, op)\
596 {\
597 NULL,\
598 NULL,\
599 },\
600 {\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603 },\
604 {\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607 },\
608 {\
609 },
610
611 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
612 DEF_SHIFTD(, im)
613 };
614
615 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
616 DEF_SHIFTD(, ECX)
617 };
618
619 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
620 DEF_SHIFTD(_raw, im)
621 #ifndef CONFIG_USER_ONLY
622 DEF_SHIFTD(_kernel, im)
623 DEF_SHIFTD(_user, im)
624 #endif
625 };
626
627 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
628 DEF_SHIFTD(_raw, ECX)
629 #ifndef CONFIG_USER_ONLY
630 DEF_SHIFTD(_kernel, ECX)
631 DEF_SHIFTD(_user, ECX)
632 #endif
633 };
634
635 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
636 [0] = {
637 gen_op_btw_T0_T1_cc,
638 gen_op_btsw_T0_T1_cc,
639 gen_op_btrw_T0_T1_cc,
640 gen_op_btcw_T0_T1_cc,
641 },
642 [1] = {
643 gen_op_btl_T0_T1_cc,
644 gen_op_btsl_T0_T1_cc,
645 gen_op_btrl_T0_T1_cc,
646 gen_op_btcl_T0_T1_cc,
647 },
648 #ifdef TARGET_X86_64
649 [2] = {
650 gen_op_btq_T0_T1_cc,
651 gen_op_btsq_T0_T1_cc,
652 gen_op_btrq_T0_T1_cc,
653 gen_op_btcq_T0_T1_cc,
654 },
655 #endif
656 };
657
658 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
659 gen_op_add_bitw_A0_T1,
660 gen_op_add_bitl_A0_T1,
661 X86_64_ONLY(gen_op_add_bitq_A0_T1),
662 };
663
664 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
665 [0] = {
666 gen_op_bsfw_T0_cc,
667 gen_op_bsrw_T0_cc,
668 },
669 [1] = {
670 gen_op_bsfl_T0_cc,
671 gen_op_bsrl_T0_cc,
672 },
673 #ifdef TARGET_X86_64
674 [2] = {
675 gen_op_bsfq_T0_cc,
676 gen_op_bsrq_T0_cc,
677 },
678 #endif
679 };
680
681 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
682 gen_op_ldsb_raw_T0_A0,
683 gen_op_ldsw_raw_T0_A0,
684 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
685 NULL,
686 #ifndef CONFIG_USER_ONLY
687 gen_op_ldsb_kernel_T0_A0,
688 gen_op_ldsw_kernel_T0_A0,
689 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
690 NULL,
691
692 gen_op_ldsb_user_T0_A0,
693 gen_op_ldsw_user_T0_A0,
694 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
695 NULL,
696 #endif
697 };
698
699 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
700 gen_op_ldub_raw_T0_A0,
701 gen_op_lduw_raw_T0_A0,
702 NULL,
703 NULL,
704
705 #ifndef CONFIG_USER_ONLY
706 gen_op_ldub_kernel_T0_A0,
707 gen_op_lduw_kernel_T0_A0,
708 NULL,
709 NULL,
710
711 gen_op_ldub_user_T0_A0,
712 gen_op_lduw_user_T0_A0,
713 NULL,
714 NULL,
715 #endif
716 };
717
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
720 gen_op_ldub_raw_T0_A0,
721 gen_op_lduw_raw_T0_A0,
722 gen_op_ldl_raw_T0_A0,
723 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
724
725 #ifndef CONFIG_USER_ONLY
726 gen_op_ldub_kernel_T0_A0,
727 gen_op_lduw_kernel_T0_A0,
728 gen_op_ldl_kernel_T0_A0,
729 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
730
731 gen_op_ldub_user_T0_A0,
732 gen_op_lduw_user_T0_A0,
733 gen_op_ldl_user_T0_A0,
734 X86_64_ONLY(gen_op_ldq_user_T0_A0),
735 #endif
736 };
737
738 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
739 gen_op_ldub_raw_T1_A0,
740 gen_op_lduw_raw_T1_A0,
741 gen_op_ldl_raw_T1_A0,
742 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
743
744 #ifndef CONFIG_USER_ONLY
745 gen_op_ldub_kernel_T1_A0,
746 gen_op_lduw_kernel_T1_A0,
747 gen_op_ldl_kernel_T1_A0,
748 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
749
750 gen_op_ldub_user_T1_A0,
751 gen_op_lduw_user_T1_A0,
752 gen_op_ldl_user_T1_A0,
753 X86_64_ONLY(gen_op_ldq_user_T1_A0),
754 #endif
755 };
756
757 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
758 gen_op_stb_raw_T0_A0,
759 gen_op_stw_raw_T0_A0,
760 gen_op_stl_raw_T0_A0,
761 X86_64_ONLY(gen_op_stq_raw_T0_A0),
762
763 #ifndef CONFIG_USER_ONLY
764 gen_op_stb_kernel_T0_A0,
765 gen_op_stw_kernel_T0_A0,
766 gen_op_stl_kernel_T0_A0,
767 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
768
769 gen_op_stb_user_T0_A0,
770 gen_op_stw_user_T0_A0,
771 gen_op_stl_user_T0_A0,
772 X86_64_ONLY(gen_op_stq_user_T0_A0),
773 #endif
774 };
775
776 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
777 NULL,
778 gen_op_stw_raw_T1_A0,
779 gen_op_stl_raw_T1_A0,
780 X86_64_ONLY(gen_op_stq_raw_T1_A0),
781
782 #ifndef CONFIG_USER_ONLY
783 NULL,
784 gen_op_stw_kernel_T1_A0,
785 gen_op_stl_kernel_T1_A0,
786 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
787
788 NULL,
789 gen_op_stw_user_T1_A0,
790 gen_op_stl_user_T1_A0,
791 X86_64_ONLY(gen_op_stq_user_T1_A0),
792 #endif
793 };
794
/* Emit code that stores the immediate `pc` into EIP/RIP, picking the
   narrowest encoding: 32-bit zero-extended, 32-bit sign-extended, or a full
   64-bit value split into two 32-bit halves. */
795 static inline void gen_jmp_im(target_ulong pc)
796 {
797 #ifdef TARGET_X86_64
798 if (pc == (uint32_t)pc) {
799 gen_op_movl_eip_im(pc);
800 } else if (pc == (int32_t)pc) {
801 gen_op_movq_eip_im(pc);
802 } else {
/* value does not fit a sign-extended 32-bit immediate: pass both halves */
803 gen_op_movq_eip_im64(pc >> 32, pc);
804 }
805 #else
806 gen_op_movl_eip_im(pc);
807 #endif
808 }
809
/* Emit code computing A0 = effective source address of a string insn:
   (R)SI plus the applicable segment base. Honors the decoded segment
   override (s->override, -1 if none) and the address size (s->aflag:
   2 = 64-bit, 1 = 32-bit, 0 = 16-bit). */
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
811 {
812 int override;
813
814 override = s->override;
815 #ifdef TARGET_X86_64
816 if (s->aflag == 2) {
/* 64-bit: segment base only applies when explicitly overridden */
817 if (override >= 0) {
818 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
819 gen_op_addq_A0_reg_sN[0][R_ESI]();
820 } else {
821 gen_op_movq_A0_reg[R_ESI]();
822 }
823 } else
824 #endif
825 if (s->aflag) {
826 /* 32 bit address */
/* base addition can be skipped entirely when DS/ES/SS bases are zero */
827 if (s->addseg && override < 0)
828 override = R_DS;
829 if (override >= 0) {
830 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
831 gen_op_addl_A0_reg_sN[0][R_ESI]();
832 } else {
833 gen_op_movl_A0_reg[R_ESI]();
834 }
835 } else {
836 /* 16 address, always override */
837 if (override < 0)
838 override = R_DS;
/* 16-bit mode: mask SI to 16 bits before adding the segment base */
839 gen_op_movl_A0_reg[R_ESI]();
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
842 }
843 }
844
/* Emit code computing A0 = effective destination address of a string insn:
   (R)DI plus the ES segment base. Unlike the ESI variant there is no
   override parameter — the destination segment of string ops is always ES. */
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
846 {
847 #ifdef TARGET_X86_64
848 if (s->aflag == 2) {
/* 64-bit addressing ignores the ES base */
849 gen_op_movq_A0_reg[R_EDI]();
850 } else
851 #endif
852 if (s->aflag) {
853 if (s->addseg) {
854 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
855 gen_op_addl_A0_reg_sN[0][R_EDI]();
856 } else {
/* flat segments: skip the base addition */
857 gen_op_movl_A0_reg[R_EDI]();
858 }
859 } else {
/* 16-bit mode: mask DI to 16 bits, then add the ES base */
860 gen_op_movl_A0_reg[R_EDI]();
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
863 }
864 }
865
/* Dispatch tables for string and I/O instructions. Unless noted, tables of
   size 4 are indexed by operand size (OT_BYTE..OT_QUAD) and tables of size 3
   by s->aflag (word/long/quad) or by ot (no byte variant for I/O sizes).
   X86_64_ONLY slots are NULL on 32-bit-only builds. */
/* T0 = +/- element size depending on the direction flag (DF). */
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
871 };
872
/* conditional jumps on (E/R)CX, used for REP termination and LOOP/JCXZ */
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
874 gen_op_jnz_ecxw,
875 gen_op_jnz_ecxl,
876 X86_64_ONLY(gen_op_jnz_ecxq),
877 };
878
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
880 gen_op_jz_ecxw,
881 gen_op_jz_ecxl,
882 X86_64_ONLY(gen_op_jz_ecxq),
883 };
884
/* decrement the REP counter at the current address size */
885 static GenOpFunc *gen_op_dec_ECX[3] = {
886 gen_op_decw_ECX,
887 gen_op_decl_ECX,
888 X86_64_ONLY(gen_op_decq_ECX),
889 };
890
/* [nz][ot]: REPNZ/REPZ termination test on the last compare result */
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
892 {
893 gen_op_jnz_subb,
894 gen_op_jnz_subw,
895 gen_op_jnz_subl,
896 X86_64_ONLY(gen_op_jnz_subq),
897 },
898 {
899 gen_op_jz_subb,
900 gen_op_jz_subw,
901 gen_op_jz_subl,
902 X86_64_ONLY(gen_op_jz_subq),
903 },
904 };
905
/* port I/O with the port number taken from DX */
906 static GenOpFunc *gen_op_in_DX_T0[3] = {
907 gen_op_inb_DX_T0,
908 gen_op_inw_DX_T0,
909 gen_op_inl_DX_T0,
910 };
911
912 static GenOpFunc *gen_op_out_DX_T0[3] = {
913 gen_op_outb_DX_T0,
914 gen_op_outw_DX_T0,
915 gen_op_outl_DX_T0,
916 };
917
/* port I/O with the port number in T0/T1 (immediate-port forms) */
918 static GenOpFunc *gen_op_in[3] = {
919 gen_op_inb_T0_T1,
920 gen_op_inw_T0_T1,
921 gen_op_inl_T0_T1,
922 };
923
924 static GenOpFunc *gen_op_out[3] = {
925 gen_op_outb_T0_T1,
926 gen_op_outw_T0_T1,
927 gen_op_outl_T0_T1,
928 };
929
/* TSS I/O-permission-bitmap checks, see gen_check_io() below */
930 static GenOpFunc *gen_check_io_T0[3] = {
931 gen_op_check_iob_T0,
932 gen_op_check_iow_T0,
933 gen_op_check_iol_T0,
934 };
935
936 static GenOpFunc *gen_check_io_DX[3] = {
937 gen_op_check_iob_DX,
938 gen_op_check_iow_DX,
939 gen_op_check_iol_DX,
940 };
941
/* Emit an I/O permission check when required: in protected mode with
   CPL > IOPL, or in vm86 mode, port access must be validated (against the
   TSS I/O bitmap via the check_io ops, which can raise #GP). EIP and cc_op
   are synced first so the exception is raised at the right instruction.
   `use_dx` selects the DX-port form, otherwise the port is taken from T0. */
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943 {
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
947 gen_jmp_im(cur_eip);
948 if (use_dx)
949 gen_check_io_DX[ot]();
950 else
951 gen_check_io_T0[ot]();
952 }
953 }
954
/* Emit one MOVS iteration: load from DS:(R)SI (override honored), store to
   ES:(R)DI, then advance both index registers by the DF-directed element
   size at the current address width. */
955 static inline void gen_movs(DisasContext *s, int ot)
956 {
957 gen_string_movl_A0_ESI(s);
958 gen_op_ld_T0_A0[ot + s->mem_index]();
959 gen_string_movl_A0_EDI(s);
960 gen_op_st_T0_A0[ot + s->mem_index]();
/* T0 = +/- element size (direction flag) */
961 gen_op_movl_T0_Dshift[ot]();
962 #ifdef TARGET_X86_64
963 if (s->aflag == 2) {
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
966 } else
967 #endif
968 if (s->aflag) {
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
971 } else {
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
974 }
975 }
976
/* Flush the statically-known cc_op into the CPU state and mark the flags
   state dynamic, so subsequently emitted code reads flags from env. */
977 static inline void gen_update_cc_op(DisasContext *s)
978 {
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
982 }
983 }
984
985 /* XXX: does not work with gdbstub "ice" single step - not a
986 serious problem */
/* Emit the REP-prefix entry test: if (E/R)CX is zero, fall through to a
   stub that jumps to the next instruction (next_eip); otherwise continue
   with the string op body. Returns the label (l2) of that exit stub so the
   loop termination checks can branch back to it. */
987 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
988 {
989 int l1, l2;
990
991 l1 = gen_new_label();
992 l2 = gen_new_label();
/* CX != 0: skip over the exit stub to the string op body at l1 */
993 gen_op_jnz_ecx[s->aflag](l1);
994 gen_set_label(l2);
995 gen_jmp_tb(s, next_eip, 1);
996 gen_set_label(l1);
997 return l2;
998 }
999
/* Emit one STOS iteration: store AL/AX/EAX/RAX to ES:(R)DI, then advance
   (R)DI by the DF-directed element size. */
1000 static inline void gen_stos(DisasContext *s, int ot)
1001 {
1002 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1003 gen_string_movl_A0_EDI(s);
1004 gen_op_st_T0_A0[ot + s->mem_index]();
1005 gen_op_movl_T0_Dshift[ot]();
1006 #ifdef TARGET_X86_64
1007 if (s->aflag == 2) {
1008 gen_op_addq_EDI_T0();
1009 } else
1010 #endif
1011 if (s->aflag) {
1012 gen_op_addl_EDI_T0();
1013 } else {
1014 gen_op_addw_EDI_T0();
1015 }
1016 }
1017
/* Emit one LODS iteration: load from DS:(R)SI (override honored) into the
   accumulator, then advance (R)SI by the DF-directed element size. */
1018 static inline void gen_lods(DisasContext *s, int ot)
1019 {
1020 gen_string_movl_A0_ESI(s);
1021 gen_op_ld_T0_A0[ot + s->mem_index]();
1022 gen_op_mov_reg_T0[ot][R_EAX]();
1023 gen_op_movl_T0_Dshift[ot]();
1024 #ifdef TARGET_X86_64
1025 if (s->aflag == 2) {
1026 gen_op_addq_ESI_T0();
1027 } else
1028 #endif
1029 if (s->aflag) {
1030 gen_op_addl_ESI_T0();
1031 } else {
1032 gen_op_addw_ESI_T0();
1033 }
1034 }
1035
/* Emit one SCAS iteration: compare the accumulator against the element at
   ES:(R)DI (sets the flags via the compare op), then advance (R)DI by the
   DF-directed element size. */
1036 static inline void gen_scas(DisasContext *s, int ot)
1037 {
1038 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1039 gen_string_movl_A0_EDI(s);
1040 gen_op_ld_T1_A0[ot + s->mem_index]();
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift[ot]();
1043 #ifdef TARGET_X86_64
1044 if (s->aflag == 2) {
1045 gen_op_addq_EDI_T0();
1046 } else
1047 #endif
1048 if (s->aflag) {
1049 gen_op_addl_EDI_T0();
1050 } else {
1051 gen_op_addw_EDI_T0();
1052 }
1053 }
1054
/* Emit one CMPS iteration: compare the element at DS:(R)SI with the one at
   ES:(R)DI (flags set via the compare op), then advance both index
   registers by the DF-directed element size. */
1055 static inline void gen_cmps(DisasContext *s, int ot)
1056 {
1057 gen_string_movl_A0_ESI(s);
1058 gen_op_ld_T0_A0[ot + s->mem_index]();
1059 gen_string_movl_A0_EDI(s);
1060 gen_op_ld_T1_A0[ot + s->mem_index]();
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift[ot]();
1063 #ifdef TARGET_X86_64
1064 if (s->aflag == 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1067 } else
1068 #endif
1069 if (s->aflag) {
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1072 } else {
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
1075 }
1076 }
1077
/* Emit one INS iteration: read from the I/O port in DX, store the value to
   ES:(R)DI, then advance (R)DI by the DF-directed element size. */
1078 static inline void gen_ins(DisasContext *s, int ot)
1079 {
1080 gen_string_movl_A0_EDI(s);
/* NOTE(review): the dummy store of 0 before the port read appears to
   probe the destination for write faults so the I/O read is not performed
   when the store would trap — confirm against the op implementation. */
1081 gen_op_movl_T0_0();
1082 gen_op_st_T0_A0[ot + s->mem_index]();
1083 gen_op_in_DX_T0[ot]();
1084 gen_op_st_T0_A0[ot + s->mem_index]();
1085 gen_op_movl_T0_Dshift[ot]();
1086 #ifdef TARGET_X86_64
1087 if (s->aflag == 2) {
1088 gen_op_addq_EDI_T0();
1089 } else
1090 #endif
1091 if (s->aflag) {
1092 gen_op_addl_EDI_T0();
1093 } else {
1094 gen_op_addw_EDI_T0();
1095 }
1096 }
1097
/* Emit one OUTS iteration: load the element at DS:(R)SI (override honored),
   write it to the I/O port in DX, then advance (R)SI by the DF-directed
   element size. */
1098 static inline void gen_outs(DisasContext *s, int ot)
1099 {
1100 gen_string_movl_A0_ESI(s);
1101 gen_op_ld_T0_A0[ot + s->mem_index]();
1102 gen_op_out_DX_T0[ot]();
1103 gen_op_movl_T0_Dshift[ot]();
1104 #ifdef TARGET_X86_64
1105 if (s->aflag == 2) {
1106 gen_op_addq_ESI_T0();
1107 } else
1108 #endif
1109 if (s->aflag) {
1110 gen_op_addl_ESI_T0();
1111 } else {
1112 gen_op_addw_ESI_T0();
1113 }
1114 }
1115
1116 /* same method as Valgrind : we generate jumps to current or next
1117 instruction */
/* GEN_REPZ(op): define gen_repz_<op> for string ops with no condition
   (MOVS/STOS/LODS/INS/OUTS). The emitted code tests CX, runs one
   iteration, decrements CX, and loops by jumping back to cur_eip (a new TB
   per iteration, so async events are seen). When direct block chaining is
   disabled (!jmp_opt), an explicit CX==0 exit to label l2 is emitted
   instead of relying on the entry test of the next TB. */
1118 #define GEN_REPZ(op) \
1119 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1120 target_ulong cur_eip, target_ulong next_eip) \
1121 { \
1122 int l2;\
1123 gen_update_cc_op(s); \
1124 l2 = gen_jz_ecx_string(s, next_eip); \
1125 gen_ ## op(s, ot); \
1126 gen_op_dec_ECX[s->aflag](); \
1127 /* a loop would cause two single step exceptions if ECX = 1 \
1128 before rep string_insn */ \
1129 if (!s->jmp_opt) \
1130 gen_op_jz_ecx[s->aflag](l2); \
1131 gen_jmp(s, cur_eip); \
1132 }
1133
/* GEN_REPZ2(op): same, for ops that also terminate on the ZF result of the
   per-element compare (SCAS/CMPS); `nz` selects REPNZ (0) vs REPZ (1)
   semantics via the gen_op_string_jnz_sub table. */
1134 #define GEN_REPZ2(op) \
1135 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1136 target_ulong cur_eip, \
1137 target_ulong next_eip, \
1138 int nz) \
1139 { \
1140 int l2;\
1141 gen_update_cc_op(s); \
1142 l2 = gen_jz_ecx_string(s, next_eip); \
1143 gen_ ## op(s, ot); \
1144 gen_op_dec_ECX[s->aflag](); \
1145 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1146 gen_op_string_jnz_sub[nz][ot](l2);\
1147 if (!s->jmp_opt) \
1148 gen_op_jz_ecx[s->aflag](l2); \
1149 gen_jmp(s, cur_eip); \
1150 }
1151
1152 GEN_REPZ(movs)
1153 GEN_REPZ(stos)
1154 GEN_REPZ(lods)
1155 GEN_REPZ(ins)
1156 GEN_REPZ(outs)
1157 GEN_REPZ2(scas)
1158 GEN_REPZ2(cmps)
1159
1160 enum {
1161 JCC_O,
1162 JCC_B,
1163 JCC_Z,
1164 JCC_BE,
1165 JCC_S,
1166 JCC_P,
1167 JCC_L,
1168 JCC_LE,
1169 };
1170
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172 [OT_BYTE] = {
1173 NULL,
1174 gen_op_jb_subb,
1175 gen_op_jz_subb,
1176 gen_op_jbe_subb,
1177 gen_op_js_subb,
1178 NULL,
1179 gen_op_jl_subb,
1180 gen_op_jle_subb,
1181 },
1182 [OT_WORD] = {
1183 NULL,
1184 gen_op_jb_subw,
1185 gen_op_jz_subw,
1186 gen_op_jbe_subw,
1187 gen_op_js_subw,
1188 NULL,
1189 gen_op_jl_subw,
1190 gen_op_jle_subw,
1191 },
1192 [OT_LONG] = {
1193 NULL,
1194 gen_op_jb_subl,
1195 gen_op_jz_subl,
1196 gen_op_jbe_subl,
1197 gen_op_js_subl,
1198 NULL,
1199 gen_op_jl_subl,
1200 gen_op_jle_subl,
1201 },
1202 #ifdef TARGET_X86_64
1203 [OT_QUAD] = {
1204 NULL,
1205 BUGGY_64(gen_op_jb_subq),
1206 gen_op_jz_subq,
1207 BUGGY_64(gen_op_jbe_subq),
1208 gen_op_js_subq,
1209 NULL,
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
1212 },
1213 #endif
1214 };
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1216 [0] = {
1217 gen_op_loopnzw,
1218 gen_op_loopzw,
1219 gen_op_jnz_ecxw,
1220 },
1221 [1] = {
1222 gen_op_loopnzl,
1223 gen_op_loopzl,
1224 gen_op_jnz_ecxl,
1225 },
1226 #ifdef TARGET_X86_64
1227 [2] = {
1228 gen_op_loopnzq,
1229 gen_op_loopzq,
1230 gen_op_jnz_ecxq,
1231 },
1232 #endif
1233 };
1234
1235 static GenOpFunc *gen_setcc_slow[8] = {
1236 gen_op_seto_T0_cc,
1237 gen_op_setb_T0_cc,
1238 gen_op_setz_T0_cc,
1239 gen_op_setbe_T0_cc,
1240 gen_op_sets_T0_cc,
1241 gen_op_setp_T0_cc,
1242 gen_op_setl_T0_cc,
1243 gen_op_setle_T0_cc,
1244 };
1245
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1253 NULL,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1263 NULL,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1273 NULL,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1276 },
1277 #ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1284 NULL,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
1287 },
1288 #endif
1289 };
1290
1291 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1292 gen_op_fadd_ST0_FT0,
1293 gen_op_fmul_ST0_FT0,
1294 gen_op_fcom_ST0_FT0,
1295 gen_op_fcom_ST0_FT0,
1296 gen_op_fsub_ST0_FT0,
1297 gen_op_fsubr_ST0_FT0,
1298 gen_op_fdiv_ST0_FT0,
1299 gen_op_fdivr_ST0_FT0,
1300 };
1301
1302 /* NOTE the exception in "r" op ordering */
1303 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1304 gen_op_fadd_STN_ST0,
1305 gen_op_fmul_STN_ST0,
1306 NULL,
1307 NULL,
1308 gen_op_fsubr_STN_ST0,
1309 gen_op_fsub_STN_ST0,
1310 gen_op_fdivr_STN_ST0,
1311 gen_op_fdiv_STN_ST0,
1312 };
1313
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one two-operand ALU op (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP) of size `ot`
   with destination `d` (register index, or OR_TMP0 for the memory operand
   addressed by A0) and second operand already in T1. Updates s1->cc_op to
   describe how flags can be reconstructed lazily. */
1315 static void gen_op(DisasContext *s1, int op, int ot, int d)
1316 {
1317 GenOpFunc *gen_update_cc;
1318
/* load the destination operand into T0 */
1319 if (d != OR_TMP0) {
1320 gen_op_mov_TN_reg[ot][0][d]();
1321 } else {
1322 gen_op_ld_T0_A0[ot + s1->mem_index]();
1323 }
1324 switch(op) {
1325 case OP_ADCL:
1326 case OP_SBBL:
/* ADC/SBB consume the current carry: sync flags first, and the combined
   op leaves the flags state dynamic (handled entirely in the op) */
1327 if (s1->cc_op != CC_OP_DYNAMIC)
1328 gen_op_set_cc_op(s1->cc_op);
1329 if (d != OR_TMP0) {
1330 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331 gen_op_mov_reg_T0[ot][d]();
1332 } else {
1333 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334 }
1335 s1->cc_op = CC_OP_DYNAMIC;
1336 goto the_end;
1337 case OP_ADDL:
1338 gen_op_addl_T0_T1();
1339 s1->cc_op = CC_OP_ADDB + ot;
1340 gen_update_cc = gen_op_update2_cc;
1341 break;
1342 case OP_SUBL:
1343 gen_op_subl_T0_T1();
1344 s1->cc_op = CC_OP_SUBB + ot;
1345 gen_update_cc = gen_op_update2_cc;
1346 break;
1347 default:
1348 case OP_ANDL:
1349 case OP_ORL:
1350 case OP_XORL:
1351 gen_op_arith_T0_T1_cc[op]();
1352 s1->cc_op = CC_OP_LOGICB + ot;
1353 gen_update_cc = gen_op_update1_cc;
1354 break;
1355 case OP_CMPL:
/* CMP sets flags only; no result is written back */
1356 gen_op_cmpl_T0_T1_cc();
1357 s1->cc_op = CC_OP_SUBB + ot;
1358 gen_update_cc = NULL;
1359 break;
1360 }
1361 if (op != OP_CMPL) {
1362 if (d != OR_TMP0)
1363 gen_op_mov_reg_T0[ot][d]();
1364 else
1365 gen_op_st_T0_A0[ot + s1->mem_index]();
1366 }
1367 /* the flags update must happen after the memory write (precise
1368 exception support) */
1369 if (gen_update_cc)
1370 gen_update_cc();
1371 the_end: ;
1372 }
1373
/* generate an inc (c > 0) or dec (c <= 0) of size 'ot'.  If
   d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    /* inc/dec leave CF untouched, so the previous flags state must be
       materialized before switching to the INC/DEC cc_op */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    /* flags update after the store (precise exception support) */
    gen_op_update_inc_cc();
}
1396
/* generate a shift/rotate 'op' of size 'ot' on destination d (OR_TMP0
   means memory operand at A0) by the count in register s (OR_TMP1
   means the count is already in T1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    /* the memory variant combines op and store (precise exceptions) */
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1417
/* shift/rotate by an immediate count 'c': load the count into T1 and
   reuse the generic variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1424
/* decode the memory operand of a ModRM byte and generate code that
   leaves the effective address in A0 (including any segment base).
   The immediate displacement bytes are consumed from the instruction
   stream.  *reg_ptr/*offset_ptr are always set to OR_A0/0 so callers
   can treat the result uniformly. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment override prefix forces the segment base addition even
       when addseg is off */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* mod=0, base=5: absolute disp32 (no base register) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing; rip_offset
                       accounts for a trailing immediate, if any */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    /* 64 bit displacements need the two-word form */
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing: fixed base/index register pairs per rm */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod=0, rm=6: absolute disp16 */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16 bit addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1614
1615 /* used for LEA and MOV AX, mem */
1616 static void gen_add_A0_ds_seg(DisasContext *s)
1617 {
1618 int override, must_add_seg;
1619 must_add_seg = s->addseg;
1620 override = R_DS;
1621 if (s->override >= 0) {
1622 override = s->override;
1623 must_add_seg = 1;
1624 } else {
1625 override = R_DS;
1626 }
1627 if (must_add_seg) {
1628 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1629 }
1630 }
1631
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0.  mod == 3 means a register operand, otherwise the effective
   address is computed and the access goes through A0. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register <-> register move */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        /* memory operand: address computed into A0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1663
1664 static inline uint32_t insn_get(DisasContext *s, int ot)
1665 {
1666 uint32_t ret;
1667
1668 switch(ot) {
1669 case OT_BYTE:
1670 ret = ldub_code(s->pc);
1671 s->pc++;
1672 break;
1673 case OT_WORD:
1674 ret = lduw_code(s->pc);
1675 s->pc += 2;
1676 break;
1677 default:
1678 case OT_LONG:
1679 ret = ldl_code(s->pc);
1680 s->pc += 4;
1681 break;
1682 }
1683 return ret;
1684 }
1685
1686 static inline int insn_const_size(unsigned int ot)
1687 {
1688 if (ot <= OT_LONG)
1689 return 1 << ot;
1690 else
1691 return 4;
1692 }
1693
/* generate a conditional jump for condition code 'b' (Jcc encoding:
   low bit inverts, bits 1..3 select the predicate).  'val' is the
   taken target, 'next_eip' the fall-through.  When TB chaining is
   allowed (jmp_opt), a two-exit block with goto_tb is emitted;
   otherwise an EIP update plus gen_eob. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

        /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only Z and S can be tested directly on the stored result;
               the "% 4" maps any of the groups above to its operand
               size (each group has 4 consecutive size variants) */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);

        if (!func) {
            /* no direct test available: evaluate the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap the two targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        /* not-taken exit, chained through goto_tb slot 0 */
        gen_op_goto_tb0(TBPARAM(tb));
        gen_jmp_im(next_eip);
        gen_op_movl_T0_im((long)tb + 0);
        gen_op_exit_tb();

        /* taken exit, chained through goto_tb slot 1 */
        gen_set_label(l1);
        gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(val);
        gen_op_movl_T0_im((long)tb + 1);
        gen_op_exit_tb();

        s->is_jmp = 3;
    } else {
        /* slow path: no TB chaining allowed */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1826
/* generate a SETcc: leave 0/1 in T0 for condition code 'b', using a
   direct test on the stored result where the current cc_op allows it,
   otherwise the slow generic flag evaluation. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read off the stored result; the "% 4"
           reduces the cc_op to its operand-size index */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: flags must be materialized first */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        /* odd condition codes are the negated forms */
        gen_op_xor_T0_1();
    }
}
1893
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must be
           up to date before the check */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: direct selector load, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1916
1917 static inline void gen_stack_update(DisasContext *s, int addend)
1918 {
1919 #ifdef TARGET_X86_64
1920 if (CODE64(s)) {
1921 if (addend == 8)
1922 gen_op_addq_ESP_8();
1923 else
1924 gen_op_addq_ESP_im(addend);
1925 } else
1926 #endif
1927 if (s->ss32) {
1928 if (addend == 2)
1929 gen_op_addl_ESP_2();
1930 else if (addend == 4)
1931 gen_op_addl_ESP_4();
1932 else
1933 gen_op_addl_ESP_im(addend);
1934 } else {
1935 if (addend == 2)
1936 gen_op_addw_ESP_2();
1937 else if (addend == 4)
1938 gen_op_addw_ESP_4();
1939 else
1940 gen_op_addw_ESP_im(addend);
1941 }
1942 }
1943
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented SP in T1 for the final ESP
                   update, then add the SS base to A0 for the store */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            /* 16 bit stack wraps at 64K */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        /* dflag + 1 selects the operand size (word or long) */
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* ESP is only updated after the store succeeded (precise
           exception support) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
1979
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_SS();
            }
        } else {
            /* 16 bit stack wraps at 64K */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        /* dflag + 1 selects the operand size (word or long) */
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();

        /* ESP update after the store (precise exception support);
           unlike gen_push_T0, T1 holds the value being pushed, so the
           segmented paths recompute ESP via gen_stack_update */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
2015
/* two step pop is necessary for precise exceptions: this loads the
   top of stack into T0; gen_pop_update() adjusts ESP afterwards. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            /* 16 bit stack wraps at 64K */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        /* dflag + 1 selects the operand size (word or long) */
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2038
2039 static void gen_pop_update(DisasContext *s)
2040 {
2041 #ifdef TARGET_X86_64
2042 if (CODE64(s)) {
2043 gen_stack_update(s, 8);
2044 } else
2045 #endif
2046 {
2047 gen_stack_update(s, 2 << s->dflag);
2048 }
2049 }
2050
/* compute the current stack address into A0 (with SS base if addseg)
   and keep the raw, unsegmented SP value in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff(); /* 16 bit stack wraps at 64K */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2060
/* NOTE: wrap around in 16 bit not fully handled */
/* generate PUSHA: store the 8 general registers (EAX..EDI order via
   7 - i) below the current stack pointer, then update ESP from T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* reserve room for 8 registers of the current operand size */
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0(); /* new SP, saved before segmentation */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    /* ESP update last (precise exception support) */
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2079
/* NOTE: wrap around in 16 bit not fully handled */
/* generate POPA: reload the general registers from the stack (the
   stored ESP slot, i == 3, is skipped per the architecture). */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* T1 = final SP, past the 8 popped slots */
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    /* ESP update last (precise exception support) */
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2101
/* generate ENTER: push EBP, optionally copy 'level' frame pointers
   (done by the gen_op_enter_level helper), set EBP to the new frame
   and reserve esp_addend bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    ot = s->dflag + OT_WORD;
    level &= 0x1f; /* architectural: nesting level is mod 32 */
    opsize = 2 << s->dflag;

    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-opsize);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0(); /* frame pointer value, pre-segmentation */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* push bp */
    gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    if (level) {
        gen_op_enter_level(level, s->dflag);
    }
    gen_op_mov_reg_T1[ot][R_EBP]();
    /* final SP: frame base minus locals minus the copied levels */
    gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2127
/* raise CPU exception 'trapno' at guest address cur_eip: flags and
   EIP are synchronized first so the handler sees a consistent state,
   then translation of this block stops. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2136
/* an interrupt is different from an exception because of the
   privilege checks; the instruction length (next_eip - cur_eip) is
   passed so the return address can be pushed correctly. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2148
/* stop execution at cur_eip and drop into the debugger: state is
   synchronized like for an exception, then translation stops. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2157
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the interrupt-inhibit window (mov ss / sti) ends here */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: single-step trap after the instruction */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0 means "no chained TB" to the execution loop */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2177
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    TranslationBlock *tb = s->tb;

    if (s->jmp_opt) {
        /* direct jump: chain to the next TB through goto_tb slot
           tb_num (0 or 1) */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (tb_num)
            gen_op_goto_tb1(TBPARAM(tb));
        else
            gen_op_goto_tb0(TBPARAM(tb));
        gen_jmp_im(eip);
        /* returned value encodes the TB pointer plus the slot index */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
        s->is_jmp = 3;
    } else {
        /* chaining not allowed: plain EIP update and end of block */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2200
/* convenience wrapper: direct jump using goto_tb slot 0 */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2205
2206 static void gen_movtl_T0_im(target_ulong val)
2207 {
2208 #ifdef TARGET_X86_64
2209 if ((int32_t)val == val) {
2210 gen_op_movl_T0_im(val);
2211 } else {
2212 gen_op_movq_T0_im64(val >> 32, val);
2213 }
2214 #else
2215 gen_op_movl_T0_im(val);
2216 #endif
2217 }
2218
2219 static void gen_movtl_T1_im(target_ulong val)
2220 {
2221 #ifdef TARGET_X86_64
2222 if ((int32_t)val == val) {
2223 gen_op_movl_T1_im(val);
2224 } else {
2225 gen_op_movq_T1_im64(val >> 32, val);
2226 }
2227 #else
2228 gen_op_movl_T1_im(val);
2229 #endif
2230 }
2231
/* 64 bit (ldq/stq) and 128 bit (ldo/sto) env field load/store helper
   tables; call sites index them with s->mem_index >> 2 (0 = raw
   access; with softmmu, kernel and user variants follow). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2263
/* sentinel: this opcode/prefix combination needs special handling in
   gen_sse() rather than a simple two-operand helper call */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* entry pair/quad per opcode, selected by the mandatory prefix:
   [0] = none, [1] = 0x66, [2] = 0xF3, [3] = 0xF2 */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }

/* dispatch table for the 0x0F xx SSE/MMX opcode space, indexed by the
   second opcode byte and the mandatory-prefix index above.  A NULL
   entry is an illegal combination; SSE_SPECIAL is decoded by hand. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2392
/* immediate-count shift group (0x0F 71/72/73): indexed by
   8 * size-group + the /reg field of the ModRM byte; NULL slots are
   invalid /reg encodings. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};

/* int <-> scalar float conversions, in groups of four:
   {ss, sd, 64-bit ss, 64-bit sd}; the 64 bit variants only exist on
   x86-64 capable builds (NULL otherwise via X86_64_ONLY). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};

/* cmpps/cmppd/cmpss/cmpsd predicates, indexed by the immediate byte
   (0..7: eq, lt, le, unord, neq, nlt, nle, ord). */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2433
2434 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2435 {
2436 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2437 int modrm, mod, rm, reg, reg_addr, offset_addr;
2438 GenOpFunc2 *sse_op2;
2439 GenOpFunc3 *sse_op3;
2440
2441 b &= 0xff;
2442 if (s->prefix & PREFIX_DATA)
2443 b1 = 1;
2444 else if (s->prefix & PREFIX_REPZ)
2445 b1 = 2;
2446 else if (s->prefix & PREFIX_REPNZ)
2447 b1 = 3;
2448 else
2449 b1 = 0;
2450 sse_op2 = sse_op_table1[b][b1];
2451 if (!sse_op2)
2452 goto illegal_op;
2453 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2454 is_xmm = 1;
2455 } else {
2456 if (b1 == 0) {
2457 /* MMX case */
2458 is_xmm = 0;
2459 } else {
2460 is_xmm = 1;
2461 }
2462 }
2463 /* simple MMX/SSE operation */
2464 if (s->flags & HF_TS_MASK) {
2465 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2466 return;
2467 }
2468 if (s->flags & HF_EM_MASK) {
2469 illegal_op:
2470 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2471 return;
2472 }
2473 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2474 goto illegal_op;
2475 if (b == 0x77) {
2476 /* emms */
2477 gen_op_emms();
2478 return;
2479 }
2480 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2481 the static cpu state) */
2482 if (!is_xmm) {
2483 gen_op_enter_mmx();
2484 }
2485
2486 modrm = ldub_code(s->pc++);
2487 reg = ((modrm >> 3) & 7);
2488 if (is_xmm)
2489 reg |= rex_r;
2490 mod = (modrm >> 6) & 3;
2491 if (sse_op2 == SSE_SPECIAL) {
2492 b |= (b1 << 8);
2493 switch(b) {
2494 case 0x0e7: /* movntq */
2495 if (mod == 3)
2496 goto illegal_op;
2497 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2498 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2499 break;
2500 case 0x1e7: /* movntdq */
2501 case 0x02b: /* movntps */
2502 case 0x12b: /* movntps */
2503 case 0x2f0: /* lddqu */
2504 if (mod == 3)
2505 goto illegal_op;
2506 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2507 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2508 break;
2509 case 0x6e: /* movd mm, ea */
2510 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2511 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2512 break;
2513 case 0x16e: /* movd xmm, ea */
2514 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2515 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2516 break;
2517 case 0x6f: /* movq mm, ea */
2518 if (mod != 3) {
2519 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2520 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2521 } else {
2522 rm = (modrm & 7);
2523 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2524 offsetof(CPUX86State,fpregs[rm].mmx));
2525 }
2526 break;
2527 case 0x010: /* movups */
2528 case 0x110: /* movupd */
2529 case 0x028: /* movaps */
2530 case 0x128: /* movapd */
2531 case 0x16f: /* movdqa xmm, ea */
2532 case 0x26f: /* movdqu xmm, ea */
2533 if (mod != 3) {
2534 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2535 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2536 } else {
2537 rm = (modrm & 7) | REX_B(s);
2538 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2539 offsetof(CPUX86State,xmm_regs[rm]));
2540 }
2541 break;
2542 case 0x210: /* movss xmm, ea */
2543 if (mod != 3) {
2544 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2545 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2546 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2547 gen_op_movl_T0_0();
2548 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2549 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2550 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2551 } else {
2552 rm = (modrm & 7) | REX_B(s);
2553 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2554 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2555 }
2556 break;
2557 case 0x310: /* movsd xmm, ea */
2558 if (mod != 3) {
2559 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2560 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2561 gen_op_movl_T0_0();
2562 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2563 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2564 } else {
2565 rm = (modrm & 7) | REX_B(s);
2566 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2567 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2568 }
2569 break;
2570 case 0x012: /* movlps */
2571 case 0x112: /* movlpd */
2572 if (mod != 3) {
2573 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2574 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2575 } else {
2576 /* movhlps */
2577 rm = (modrm & 7) | REX_B(s);
2578 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2579 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2580 }
2581 break;
2582 case 0x016: /* movhps */
2583 case 0x116: /* movhpd */
2584 if (mod != 3) {
2585 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2586 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2587 } else {
2588 /* movlhps */
2589 rm = (modrm & 7) | REX_B(s);
2590 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2591 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2592 }
2593 break;
2594 case 0x216: /* movshdup */
2595 if (mod != 3) {
2596 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2597 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2598 } else {
2599 rm = (modrm & 7) | REX_B(s);
2600 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2601 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2602 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2603 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2604 }
2605 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2606 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2607 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2608 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2609 break;
2610 case 0x7e: /* movd ea, mm */
2611 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2612 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2613 break;
2614 case 0x17e: /* movd ea, xmm */
2615 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2616 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2617 break;
2618 case 0x27e: /* movq xmm, ea */
2619 if (mod != 3) {
2620 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2621 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2622 } else {
2623 rm = (modrm & 7) | REX_B(s);
2624 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2625 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2626 }
2627 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2628 break;
2629 case 0x7f: /* movq ea, mm */
2630 if (mod != 3) {
2631 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2632 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2633 } else {
2634 rm = (modrm & 7);
2635 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2636 offsetof(CPUX86State,fpregs[reg].mmx));
2637 }
2638 break;
2639 case 0x011: /* movups */
2640 case 0x111: /* movupd */
2641 case 0x029: /* movaps */
2642 case 0x129: /* movapd */
2643 case 0x17f: /* movdqa ea, xmm */
2644 case 0x27f: /* movdqu ea, xmm */
2645 if (mod != 3) {
2646 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2647 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2648 } else {
2649 rm = (modrm & 7) | REX_B(s);
2650 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2651 offsetof(CPUX86State,xmm_regs[reg]));
2652 }
2653 break;
2654 case 0x211: /* movss ea, xmm */
2655 if (mod != 3) {
2656 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2657 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2658 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2659 } else {
2660 rm = (modrm & 7) | REX_B(s);
2661 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2662 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2663 }
2664 break;
2665 case 0x311: /* movsd ea, xmm */
2666 if (mod != 3) {
2667 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2668 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2669 } else {
2670 rm = (modrm & 7) | REX_B(s);
2671 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2672 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2673 }
2674 break;
2675 case 0x013: /* movlps */
2676 case 0x113: /* movlpd */
2677 if (mod != 3) {
2678 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2679 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2680 } else {
2681 goto illegal_op;
2682 }
2683 break;
2684 case 0x017: /* movhps */
2685 case 0x117: /* movhpd */
2686 if (mod != 3) {
2687 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2688 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2689 } else {
2690 goto illegal_op;
2691 }
2692 break;
2693 case 0x71: /* shift mm, im */
2694 case 0x72:
2695 case 0x73:
2696 case 0x171: /* shift xmm, im */
2697 case 0x172:
2698 case 0x173:
2699 val = ldub_code(s->pc++);
2700 if (is_xmm) {
2701 gen_op_movl_T0_im(val);
2702 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2703 gen_op_movl_T0_0();
2704 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2705 op1_offset = offsetof(CPUX86State,xmm_t0);
2706 } else {
2707 gen_op_movl_T0_im(val);
2708 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2709 gen_op_movl_T0_0();
2710 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2711 op1_offset = offsetof(CPUX86State,mmx_t0);
2712 }
2713 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2714 if (!sse_op2)
2715 goto illegal_op;
2716 if (is_xmm) {
2717 rm = (modrm & 7) | REX_B(s);
2718 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2719 } else {
2720 rm = (modrm & 7);
2721 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2722 }
2723 sse_op2(op2_offset, op1_offset);
2724 break;
2725 case 0x050: /* movmskps */
2726 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[reg]));
2727 rm = (modrm & 7) | REX_B(s);
2728 gen_op_mov_reg_T0[OT_LONG][rm]();
2729 break;
2730 case 0x150: /* movmskpd */
2731 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[reg]));
2732 rm = (modrm & 7) | REX_B(s);
2733 gen_op_mov_reg_T0[OT_LONG][rm]();
2734 break;
2735 case 0x02a: /* cvtpi2ps */
2736 case 0x12a: /* cvtpi2pd */
2737 gen_op_enter_mmx();
2738 if (mod != 3) {
2739 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2740 op2_offset = offsetof(CPUX86State,mmx_t0);
2741 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2742 } else {
2743 rm = (modrm & 7);
2744 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2745 }
2746 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2747 switch(b >> 8) {
2748 case 0x0:
2749 gen_op_cvtpi2ps(op1_offset, op2_offset);
2750 break;
2751 default:
2752 case 0x1:
2753 gen_op_cvtpi2pd(op1_offset, op2_offset);
2754 break;
2755 }
2756 break;
2757 case 0x22a: /* cvtsi2ss */
2758 case 0x32a: /* cvtsi2sd */
2759 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2760 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2761 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2762 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2763 break;
2764 case 0x02c: /* cvttps2pi */
2765 case 0x12c: /* cvttpd2pi */
2766 case 0x02d: /* cvtps2pi */
2767 case 0x12d: /* cvtpd2pi */
2768 gen_op_enter_mmx();
2769 if (mod != 3) {
2770 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2771 op2_offset = offsetof(CPUX86State,xmm_t0);
2772 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2773 } else {
2774 rm = (modrm & 7) | REX_B(s);
2775 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2776 }
2777 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2778 switch(b) {
2779 case 0x02c:
2780 gen_op_cvttps2pi(op1_offset, op2_offset);
2781 break;
2782 case 0x12c:
2783 gen_op_cvttpd2pi(op1_offset, op2_offset);
2784 break;
2785 case 0x02d:
2786 gen_op_cvtps2pi(op1_offset, op2_offset);
2787 break;
2788 case 0x12d:
2789 gen_op_cvtpd2pi(op1_offset, op2_offset);
2790 break;
2791 }
2792 break;
2793 case 0x22c: /* cvttss2si */
2794 case 0x32c: /* cvttsd2si */
2795 case 0x22d: /* cvtss2si */
2796 case 0x32d: /* cvtsd2si */
2797 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2798 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2799 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2800 (b & 1) * 4](op1_offset);
2801 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2802 break;
2803 case 0xc4: /* pinsrw */
2804 case 0x1c4:
2805 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2806 val = ldub_code(s->pc++);
2807 if (b1) {
2808 val &= 7;
2809 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2810 } else {
2811 val &= 3;
2812 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2813 }
2814 break;
2815 case 0xc5: /* pextrw */
2816 case 0x1c5:
2817 if (mod != 3)
2818 goto illegal_op;
2819 val = ldub_code(s->pc++);
2820 if (b1) {
2821 val &= 7;
2822 rm = (modrm & 7) | REX_B(s);
2823 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2824 } else {
2825 val &= 3;
2826 rm = (modrm & 7);
2827 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2828 }
2829 reg = ((modrm >> 3) & 7) | rex_r;
2830 gen_op_mov_reg_T0[OT_LONG][reg]();
2831 break;
2832 case 0x1d6: /* movq ea, xmm */
2833 if (mod != 3) {
2834 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2835 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2836 } else {
2837 rm = (modrm & 7) | REX_B(s);
2838 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2839 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2840 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2841 }
2842 break;
2843 case 0x2d6: /* movq2dq */
2844 gen_op_enter_mmx();
2845 rm = (modrm & 7) | REX_B(s);
2846 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2847 offsetof(CPUX86State,fpregs[reg & 7].mmx));
2848 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2849 break;
2850 case 0x3d6: /* movdq2q */
2851 gen_op_enter_mmx();
2852 rm = (modrm & 7);
2853 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2854 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2855 break;
2856 case 0xd7: /* pmovmskb */
2857 case 0x1d7:
2858 if (mod != 3)
2859 goto illegal_op;
2860 if (b1) {
2861 rm = (modrm & 7) | REX_B(s);
2862 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2863 } else {
2864 rm = (modrm & 7);
2865 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2866 }
2867 reg = ((modrm >> 3) & 7) | rex_r;
2868 gen_op_mov_reg_T0[OT_LONG][reg]();
2869 break;
2870 default:
2871 goto illegal_op;
2872 }
2873 } else {
2874 /* generic MMX or SSE operation */
2875 if (b == 0xf7) {
2876 /* maskmov : we must prepare A0 */
2877 if (mod != 3)
2878 goto illegal_op;
2879 #ifdef TARGET_X86_64
2880 if (CODE64(s)) {
2881 gen_op_movq_A0_reg[R_EDI]();
2882 } else
2883 #endif
2884 {
2885 gen_op_movl_A0_reg[R_EDI]();
2886 if (s->aflag == 0)
2887 gen_op_andl_A0_ffff();
2888 }
2889 gen_add_A0_ds_seg(s);
2890 }
2891 if (is_xmm) {
2892 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2893 if (mod != 3) {
2894 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2895 op2_offset = offsetof(CPUX86State,xmm_t0);
2896 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2897 b == 0xc2)) {
2898 /* specific case for SSE single instructions */
2899 if (b1 == 2) {
2900 /* 32 bit access */
2901 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2902 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2903 } else {
2904 /* 64 bit access */
2905 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2906 }
2907 } else {
2908 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2909 }
2910 } else {
2911 rm = (modrm & 7) | REX_B(s);
2912 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2913 }
2914 } else {
2915 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2916 if (mod != 3) {
2917 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2918 op2_offset = offsetof(CPUX86State,mmx_t0);
2919 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2920 } else {
2921 rm = (modrm & 7);
2922 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2923 }
2924 }
2925 switch(b) {
2926 case 0x70: /* pshufx insn */
2927 case 0xc6: /* pshufx insn */
2928 val = ldub_code(s->pc++);
2929 sse_op3 = (GenOpFunc3 *)sse_op2;
2930 sse_op3(op1_offset, op2_offset, val);
2931 break;
2932 case 0xc2:
2933 /* compare insns */
2934 val = ldub_code(s->pc++);
2935 if (val >= 8)
2936 goto illegal_op;
2937 sse_op2 = sse_op_table4[val][b1];
2938 sse_op2(op1_offset, op2_offset);
2939 break;
2940 default:
2941 sse_op2(op1_offset, op2_offset);
2942 break;
2943 }
2944 if (b == 0x2e || b == 0x2f) {
2945 s->cc_op = CC_OP_EFLAGS;
2946 }
2947 }
2948 }
2949
2950
/* Decode and translate a single guest instruction beginning at
   pc_start.  s->is_jmp is set when translation of the current block
   must stop.  Returns the guest address of the next instruction. */
2953 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2954 {
2955 int b, prefixes, aflag, dflag;
2956 int shift, ot;
2957 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2958 target_ulong next_eip, tval;
2959 int rex_w, rex_r;
2960
2961 s->pc = pc_start;
2962 prefixes = 0;
2963 aflag = s->code32;
2964 dflag = s->code32;
2965 s->override = -1;
2966 rex_w = -1;
2967 rex_r = 0;
2968 #ifdef TARGET_X86_64
2969 s->rex_x = 0;
2970 s->rex_b = 0;
2971 x86_64_hregs = 0;
2972 #endif
2973 s->rip_offset = 0; /* for relative ip address */
2974 next_byte:
2975 b = ldub_code(s->pc);
2976 s->pc++;
2977 /* check prefixes */
2978 #ifdef TARGET_X86_64
2979 if (CODE64(s)) {
2980 switch (b) {
2981 case 0xf3:
2982 prefixes |= PREFIX_REPZ;
2983 goto next_byte;
2984 case 0xf2:
2985 prefixes |= PREFIX_REPNZ;
2986 goto next_byte;
2987 case 0xf0:
2988 prefixes |= PREFIX_LOCK;
2989 goto next_byte;
2990 case 0x2e:
2991 s->override = R_CS;
2992 goto next_byte;
2993 case 0x36:
2994 s->override = R_SS;
2995 goto next_byte;
2996 case 0x3e:
2997 s->override = R_DS;
2998 goto next_byte;
2999 case 0x26:
3000 s->override = R_ES;
3001 goto next_byte;
3002 case 0x64:
3003 s->override = R_FS;
3004 goto next_byte;
3005 case 0x65:
3006 s->override = R_GS;
3007 goto next_byte;
3008 case 0x66:
3009 prefixes |= PREFIX_DATA;
3010 goto next_byte;
3011 case 0x67:
3012 prefixes |= PREFIX_ADR;
3013 goto next_byte;
3014 case 0x40 ... 0x4f:
3015 /* REX prefix */
3016 rex_w = (b >> 3) & 1;
3017 rex_r = (b & 0x4) << 1;
3018 s->rex_x = (b & 0x2) << 2;
3019 REX_B(s) = (b & 0x1) << 3;
3020 x86_64_hregs = 1; /* select uniform byte register addressing */
3021 goto next_byte;
3022 }
3023 if (rex_w == 1) {
3024 /* 0x66 is ignored if rex.w is set */
3025 dflag = 2;
3026 } else {
3027 if (prefixes & PREFIX_DATA)
3028 dflag ^= 1;
3029 }
3030 if (!(prefixes & PREFIX_ADR))
3031 aflag = 2;
3032 } else
3033 #endif
3034 {
3035 switch (b) {
3036 case 0xf3:
3037 prefixes |= PREFIX_REPZ;
3038 goto next_byte;
3039 case 0xf2:
3040 prefixes |= PREFIX_REPNZ;
3041 goto next_byte;
3042 case 0xf0:
3043 prefixes |= PREFIX_LOCK;
3044 goto next_byte;
3045 case 0x2e:
3046 s->override = R_CS;
3047 goto next_byte;
3048 case 0x36:
3049 s->override = R_SS;
3050 goto next_byte;
3051 case 0x3e:
3052 s->override = R_DS;
3053 goto next_byte;
3054 case 0x26:
3055 s->override = R_ES;
3056 goto next_byte;
3057 case 0x64:
3058 s->override = R_FS;
3059 goto next_byte;
3060 case 0x65:
3061 s->override = R_GS;
3062 goto next_byte;
3063 case 0x66:
3064 prefixes |= PREFIX_DATA;
3065 goto next_byte;
3066 case 0x67:
3067 prefixes |= PREFIX_ADR;
3068 goto next_byte;
3069 }
3070 if (prefixes & PREFIX_DATA)
3071 dflag ^= 1;
3072 if (prefixes & PREFIX_ADR)
3073 aflag ^= 1;
3074 }
3075
3076 s->prefix = prefixes;
3077 s->aflag = aflag;
3078 s->dflag = dflag;
3079
3080 /* lock generation */
3081 if (prefixes & PREFIX_LOCK)
3082 gen_op_lock();
3083
3084 /* now check op code */
3085 reswitch:
3086 switch(b) {
3087 case 0x0f:
3088 /**************************/
3089 /* extended op code */
3090 b = ldub_code(s->pc++) | 0x100;
3091 goto reswitch;
3092
3093 /**************************/
3094 /* arith & logic */
3095 case 0x00 ... 0x05:
3096 case 0x08 ... 0x0d:
3097 case 0x10 ... 0x15:
3098 case 0x18 ... 0x1d:
3099 case 0x20 ... 0x25:
3100 case 0x28 ... 0x2d:
3101 case 0x30 ... 0x35:
3102 case 0x38 ... 0x3d:
3103 {
3104 int op, f, val;
3105 op = (b >> 3) & 7;
3106 f = (b >> 1) & 3;
3107
3108 if ((b & 1) == 0)
3109 ot = OT_BYTE;
3110 else
3111 ot = dflag + OT_WORD;
3112
3113 switch(f) {
3114 case 0: /* OP Ev, Gv */
3115 modrm = ldub_code(s->pc++);
3116 reg = ((modrm >> 3) & 7) | rex_r;
3117 mod = (modrm >> 6) & 3;
3118 rm = (modrm & 7) | REX_B(s);
3119 if (mod != 3) {
3120 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3121 opreg = OR_TMP0;
3122 } else if (op == OP_XORL && rm == reg) {
3123 xor_zero:
3124 /* xor reg, reg optimisation */
3125 gen_op_movl_T0_0();
3126 s->cc_op = CC_OP_LOGICB + ot;
3127 gen_op_mov_reg_T0[ot][reg]();
3128 gen_op_update1_cc();
3129 break;
3130 } else {
3131 opreg = rm;
3132 }
3133 gen_op_mov_TN_reg[ot][1][reg]();
3134 gen_op(s, op, ot, opreg);
3135 break;
3136 case 1: /* OP Gv, Ev */
3137 modrm = ldub_code(s->pc++);
3138 mod = (modrm >> 6) & 3;
3139 reg = ((modrm >> 3) & 7) | rex_r;
3140 rm = (modrm & 7) | REX_B(s);
3141 if (mod != 3) {
3142 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3143 gen_op_ld_T1_A0[ot + s->mem_index]();
3144 } else if (op == OP_XORL && rm == reg) {
3145 goto xor_zero;
3146 } else {
3147 gen_op_mov_TN_reg[ot][1][rm]();
3148 }
3149 gen_op(s, op, ot, reg);
3150 break;
3151 case 2: /* OP A, Iv */
3152 val = insn_get(s, ot);
3153 gen_op_movl_T1_im(val);
3154 gen_op(s, op, ot, OR_EAX);
3155 break;
3156 }
3157 }
3158 break;
3159
3160 case 0x80: /* GRP1 */
3161 case 0x81:
3162 case 0x82:
3163 case 0x83:
3164 {
3165 int val;
3166
3167 if ((b & 1) == 0)
3168 ot = OT_BYTE;
3169 else
3170 ot = dflag + OT_WORD;
3171
3172 modrm = ldub_code(s->pc++);
3173 mod = (modrm >> 6) & 3;
3174 rm = (modrm & 7) | REX_B(s);
3175 op = (modrm >> 3) & 7;
3176
3177 if (mod != 3) {
3178 if (b == 0x83)
3179 s->rip_offset = 1;
3180 else
3181 s->rip_offset = insn_const_size(ot);
3182 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3183 opreg = OR_TMP0;
3184 } else {
3185 opreg = rm;
3186 }
3187
3188 switch(b) {
3189 default:
3190 case 0x80:
3191 case 0x81:
3192 case 0x82:
3193 val = insn_get(s, ot);
3194 break;
3195 case 0x83:
3196 val = (int8_t)insn_get(s, OT_BYTE);
3197 break;
3198 }
3199 gen_op_movl_T1_im(val);
3200 gen_op(s, op, ot, opreg);
3201 }
3202 break;
3203
3204 /**************************/
3205 /* inc, dec, and other misc arith */
3206 case 0x40 ... 0x47: /* inc Gv */
3207 ot = dflag ? OT_LONG : OT_WORD;
3208 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3209 break;
3210 case 0x48 ... 0x4f: /* dec Gv */
3211 ot = dflag ? OT_LONG : OT_WORD;
3212 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3213 break;
3214 case 0xf6: /* GRP3 */
3215 case 0xf7:
3216 if ((b & 1) == 0)
3217 ot = OT_BYTE;
3218 else
3219 ot = dflag + OT_WORD;
3220
3221 modrm = ldub_code(s->pc++);
3222 mod = (modrm >> 6) & 3;
3223 rm = (modrm & 7) | REX_B(s);
3224 op = (modrm >> 3) & 7;
3225 if (mod != 3) {
3226 if (op == 0)
3227 s->rip_offset = insn_const_size(ot);
3228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3229 gen_op_ld_T0_A0[ot + s->mem_index]();
3230 } else {
3231 gen_op_mov_TN_reg[ot][0][rm]();
3232 }
3233
3234 switch(op) {
3235 case 0: /* test */
3236 val = insn_get(s, ot);
3237 gen_op_movl_T1_im(val);
3238 gen_op_testl_T0_T1_cc();
3239 s->cc_op = CC_OP_LOGICB + ot;
3240 break;
3241 case 2: /* not */
3242 gen_op_notl_T0();
3243 if (mod != 3) {
3244 gen_op_st_T0_A0[ot + s->mem_index]();
3245 } else {
3246 gen_op_mov_reg_T0[ot][rm]();
3247 }
3248 break;
3249 case 3: /* neg */
3250 gen_op_negl_T0();
3251 if (mod != 3) {
3252 gen_op_st_T0_A0[ot + s->mem_index]();
3253 } else {
3254 gen_op_mov_reg_T0[ot][rm]();
3255 }
3256 gen_op_update_neg_cc();
3257 s->cc_op = CC_OP_SUBB + ot;
3258 break;
3259 case 4: /* mul */
3260 switch(ot) {
3261 case OT_BYTE:
3262 gen_op_mulb_AL_T0();
3263 s->cc_op = CC_OP_MULB;
3264 break;
3265 case OT_WORD:
3266 gen_op_mulw_AX_T0();
3267 s->cc_op = CC_OP_MULW;
3268 break;
3269 default:
3270 case OT_LONG:
3271 gen_op_mull_EAX_T0();
3272 s->cc_op = CC_OP_MULL;
3273 break;
3274 #ifdef TARGET_X86_64
3275 case OT_QUAD:
3276 gen_op_mulq_EAX_T0();
3277 s->cc_op = CC_OP_MULQ;
3278 break;
3279 #endif
3280 }
3281 break;
3282 case 5: /* imul */
3283 switch(ot) {
3284 case OT_BYTE:
3285 gen_op_imulb_AL_T0();
3286 s->cc_op = CC_OP_MULB;
3287 break;
3288 case OT_WORD:
3289 gen_op_imulw_AX_T0();
3290 s->cc_op = CC_OP_MULW;
3291 break;
3292 default:
3293 case OT_LONG:
3294 gen_op_imull_EAX_T0();
3295 s->cc_op = CC_OP_MULL;
3296 break;
3297 #ifdef TARGET_X86_64
3298 case OT_QUAD:
3299 gen_op_imulq_EAX_T0();
3300 s->cc_op = CC_OP_MULQ;
3301 break;
3302 #endif
3303 }
3304 break;
3305 case 6: /* div */
3306 switch(ot) {
3307 case OT_BYTE:
3308 gen_jmp_im(pc_start - s->cs_base);
3309 gen_op_divb_AL_T0();
3310 break;
3311 case OT_WORD:
3312 gen_jmp_im(pc_start - s->cs_base);
3313 gen_op_divw_AX_T0();
3314 break;
3315 default:
3316 case OT_LONG:
3317 gen_jmp_im(pc_start - s->cs_base);
3318 gen_op_divl_EAX_T0();
3319 break;
3320 #ifdef TARGET_X86_64
3321 case OT_QUAD:
3322 gen_jmp_im(pc_start - s->cs_base);
3323 gen_op_divq_EAX_T0();
3324 break;
3325 #endif
3326 }
3327 break;
3328 case 7: /* idiv */
3329 switch(ot) {
3330 case OT_BYTE:
3331 gen_jmp_im(pc_start - s->cs_base);
3332 gen_op_idivb_AL_T0();
3333 break;
3334 case OT_WORD:
3335 gen_jmp_im(pc_start - s->cs_base);
3336 gen_op_idivw_AX_T0();
3337 break;
3338 default:
3339 case OT_LONG:
3340 gen_jmp_im(pc_start - s->cs_base);
3341 gen_op_idivl_EAX_T0();
3342 break;
3343 #ifdef TARGET_X86_64
3344 case OT_QUAD:
3345 gen_jmp_im(pc_start - s->cs_base);
3346 gen_op_idivq_EAX_T0();
3347 break;
3348 #endif
3349 }
3350 break;
3351 default:
3352 goto illegal_op;
3353 }
3354 break;
3355
3356 case 0xfe: /* GRP4 */
3357 case 0xff: /* GRP5 */
3358 if ((b & 1) == 0)
3359 ot = OT_BYTE;
3360 else
3361 ot = dflag + OT_WORD;
3362
3363 modrm = ldub_code(s->pc++);
3364 mod = (modrm >> 6) & 3;
3365 rm = (modrm & 7) | REX_B(s);
3366 op = (modrm >> 3) & 7;
3367 if (op >= 2 && b == 0xfe) {
3368 goto illegal_op;
3369 }
3370 if (CODE64(s)) {
3371 if (op >= 2 && op <= 5) {
3372 /* operand size for jumps is 64 bit */
3373 ot = OT_QUAD;
3374 } else if (op == 6) {
3375 /* default push size is 64 bit */
3376 ot = dflag ? OT_QUAD : OT_WORD;
3377 }
3378 }
3379 if (mod != 3) {
3380 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3381 if (op >= 2 && op != 3 && op != 5)
3382 gen_op_ld_T0_A0[ot + s->mem_index]();
3383 } else {
3384 gen_op_mov_TN_reg[ot][0][rm]();
3385 }
3386
3387 switch(op) {
3388 case 0: /* inc Ev */
3389 if (mod != 3)
3390 opreg = OR_TMP0;
3391 else
3392 opreg = rm;
3393 gen_inc(s, ot, opreg, 1);
3394 break;
3395 case 1: /* dec Ev */
3396 if (mod != 3)
3397 opreg = OR_TMP0;
3398 else
3399 opreg = rm;
3400 gen_inc(s, ot, opreg, -1);
3401 break;
3402 case 2: /* call Ev */
3403 /* XXX: optimize if memory (no 'and' is necessary) */
3404 if (s->dflag == 0)
3405 gen_op_andl_T0_ffff();
3406 next_eip = s->pc - s->cs_base;
3407 gen_movtl_T1_im(next_eip);
3408 gen_push_T1(s);
3409 gen_op_jmp_T0();
3410 gen_eob(s);
3411 break;
3412 case 3: /* lcall Ev */
3413 gen_op_ld_T1_A0[ot + s->mem_index]();
3414 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3415 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3416 do_lcall:
3417 if (s->pe && !s->vm86) {
3418 if (s->cc_op != CC_OP_DYNAMIC)
3419 gen_op_set_cc_op(s->cc_op);
3420 gen_jmp_im(pc_start - s->cs_base);
3421 gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
3422 } else {
3423 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3424 }
3425 gen_eob(s);
3426 break;
3427 case 4: /* jmp Ev */
3428 if (s->dflag == 0)
3429 gen_op_andl_T0_ffff();
3430 gen_op_jmp_T0();
3431 gen_eob(s);
3432 break;
3433 case 5: /* ljmp Ev */
3434 gen_op_ld_T1_A0[ot + s->mem_index]();
3435 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3436 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3437 do_ljmp:
3438 if (s->pe && !s->vm86) {
3439 if (s->cc_op != CC_OP_DYNAMIC)
3440 gen_op_set_cc_op(s->cc_op);
3441 gen_jmp_im(pc_start - s->cs_base);
3442 gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
3443 } else {
3444 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3445 gen_op_movl_T0_T1();
3446 gen_op_jmp_T0();
3447 }
3448 gen_eob(s);
3449 break;
3450 case 6: /* push Ev */
3451 gen_push_T0(s);
3452 break;
3453 default:
3454 goto illegal_op;
3455 }
3456 break;
3457
3458 case 0x84: /* test Ev, Gv */
3459 case 0x85:
3460 if ((b & 1) == 0)
3461 ot = OT_BYTE;
3462 else
3463 ot = dflag + OT_WORD;
3464
3465 modrm = ldub_code(s->pc++);
3466 mod = (modrm >> 6) & 3;
3467 rm = (modrm & 7) | REX_B(s);
3468 reg = ((modrm >> 3) & 7) | rex_r;
3469
3470 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3471 gen_op_mov_TN_reg[ot][1][reg]();
3472 gen_op_testl_T0_T1_cc();
3473 s->cc_op = CC_OP_LOGICB + ot;
3474 break;
3475
3476 case 0xa8: /* test eAX, Iv */
3477 case 0xa9:
3478 if ((b & 1) == 0)
3479 ot = OT_BYTE;
3480 else
3481 ot = dflag + OT_WORD;
3482 val = insn_get(s, ot);
3483
3484 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3485 gen_op_movl_T1_im(val);
3486 gen_op_testl_T0_T1_cc();
3487 s->cc_op = CC_OP_LOGICB + ot;
3488 break;
3489
3490 case 0x98: /* CWDE/CBW */
3491 #ifdef TARGET_X86_64
3492 if (dflag == 2) {
3493 gen_op_movslq_RAX_EAX();
3494 } else
3495 #endif
3496 if (dflag == 1)
3497 gen_op_movswl_EAX_AX();
3498 else
3499 gen_op_movsbw_AX_AL();
3500 break;
3501 case 0x99: /* CDQ/CWD */
3502 #ifdef TARGET_X86_64
3503 if (dflag == 2) {
3504 gen_op_movsqo_RDX_RAX();
3505 } else
3506 #endif
3507 if (dflag == 1)
3508 gen_op_movslq_EDX_EAX();
3509 else
3510 gen_op_movswl_DX_AX();
3511 break;
3512 case 0x1af: /* imul Gv, Ev */
3513 case 0x69: /* imul Gv, Ev, I */
3514 case 0x6b:
3515 ot = dflag + OT_WORD;
3516 modrm = ldub_code(s->pc++);
3517 reg = ((modrm >> 3) & 7) | rex_r;
3518 if (b == 0x69)
3519 s->rip_offset = insn_const_size(ot);
3520 else if (b == 0x6b)
3521 s->rip_offset = 1;
3522 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3523 if (b == 0x69) {
3524 val = insn_get(s, ot);
3525 gen_op_movl_T1_im(val);
3526 } else if (b == 0x6b) {
3527 val = (int8_t)insn_get(s, OT_BYTE);
3528 gen_op_movl_T1_im(val);
3529 } else {
3530 gen_op_mov_TN_reg[ot][1][reg]();
3531 }
3532
3533 #ifdef TARGET_X86_64
3534 if (ot == OT_QUAD) {
3535 gen_op_imulq_T0_T1();
3536 } else
3537 #endif
3538 if (ot == OT_LONG) {
3539 gen_op_imull_T0_T1();
3540 } else {
3541 gen_op_imulw_T0_T1();
3542 }
3543 gen_op_mov_reg_T0[ot][reg]();
3544 s->cc_op = CC_OP_MULB + ot;
3545 break;
3546 case 0x1c0:
3547 case 0x1c1: /* xadd Ev, Gv */
3548 if ((b & 1) == 0)
3549 ot = OT_BYTE;
3550 else
3551 ot = dflag + OT_WORD;
3552 modrm = ldub_code(s->pc++);
3553 reg = ((modrm >> 3) & 7) | rex_r;
3554 mod = (modrm >> 6) & 3;
3555 if (mod == 3) {
3556 rm = (modrm & 7) | REX_B(s);
3557 gen_op_mov_TN_reg[ot][0][reg]();
3558 gen_op_mov_TN_reg[ot][1][rm]();
3559 gen_op_addl_T0_T1();
3560 gen_op_mov_reg_T1[ot][reg]();
3561 gen_op_mov_reg_T0[ot][rm]();
3562 } else {
3563 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3564 gen_op_mov_TN_reg[ot][0][reg]();
3565 gen_op_ld_T1_A0[ot + s->mem_index]();
3566 gen_op_addl_T0_T1();
3567 gen_op_st_T0_A0[ot + s->mem_index]();
3568 gen_op_mov_reg_T1[ot][reg]();
3569 }
3570 gen_op_update2_cc();
3571 s->cc_op = CC_OP_ADDB + ot;
3572 break;
3573 case 0x1b0:
3574 case 0x1b1: /* cmpxchg Ev, Gv */
3575 if ((b & 1) == 0)
3576 ot = OT_BYTE;
3577 else
3578 ot = dflag + OT_WORD;
3579 modrm = ldub_code(s->pc++);
3580 reg = ((modrm >> 3) & 7) | rex_r;
3581 mod = (modrm >> 6) & 3;
3582 gen_op_mov_TN_reg[ot][1][reg]();
3583 if (mod == 3) {
3584 rm = (modrm & 7) | REX_B(s);
3585 gen_op_mov_TN_reg[ot][0][rm]();
3586 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3587 gen_op_mov_reg_T0[ot][rm]();
3588 } else {
3589 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3590 gen_op_ld_T0_A0[ot + s->mem_index]();
3591 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3592 }
3593 s->cc_op = CC_OP_SUBB + ot;
3594 break;
3595 case 0x1c7: /* cmpxchg8b */
3596 modrm = ldub_code(s->pc++);
3597 mod = (modrm >> 6) & 3;
3598 if (mod == 3)
3599 goto illegal_op;
3600 if (s->cc_op != CC_OP_DYNAMIC)
3601 gen_op_set_cc_op(s->cc_op);
3602 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3603 gen_op_cmpxchg8b();
3604 s->cc_op = CC_OP_EFLAGS;
3605 break;
3606
3607 /**************************/
3608 /* push/pop */
3609 case 0x50 ... 0x57: /* push */
3610 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3611 gen_push_T0(s);
3612 break;
3613 case 0x58 ... 0x5f: /* pop */
3614 if (CODE64(s)) {
3615 ot = dflag ? OT_QUAD : OT_WORD;
3616 } else {
3617 ot = dflag + OT_WORD;
3618 }
3619 gen_pop_T0(s);
3620 /* NOTE: order is important for pop %sp */
3621 gen_pop_update(s);
3622 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3623 break;
3624 case 0x60: /* pusha */
3625 if (CODE64(s))
3626 goto illegal_op;
3627 gen_pusha(s);
3628 break;
3629 case 0x61: /* popa */
3630 if (CODE64(s))
3631 goto illegal_op;
3632 gen_popa(s);
3633 break;
3634 case 0x68: /* push Iv */
3635 case 0x6a:
3636 if (CODE64(s)) {
3637 ot = dflag ? OT_QUAD : OT_WORD;
3638 } else {
3639 ot = dflag + OT_WORD;
3640 }
3641 if (b == 0x68)
3642 val = insn_get(s, ot);
3643 else
3644 val = (int8_t)insn_get(s, OT_BYTE);
3645 gen_op_movl_T0_im(val);
3646 gen_push_T0(s);
3647 break;
3648 case 0x8f: /* pop Ev */
3649 if (CODE64(s)) {
3650 ot = dflag ? OT_QUAD : OT_WORD;
3651 } else {
3652 ot = dflag + OT_WORD;
3653 }
3654 modrm = ldub_code(s->pc++);
3655 mod = (modrm >> 6) & 3;
3656 gen_pop_T0(s);
3657 if (mod == 3) {
3658 /* NOTE: order is important for pop %sp */
3659 gen_pop_update(s);
3660 rm = (modrm & 7) | REX_B(s);
3661 gen_op_mov_reg_T0[ot][rm]();
3662 } else {
3663 /* NOTE: order is important too for MMU exceptions */
3664 s->popl_esp_hack = 1 << ot;
3665 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3666 s->popl_esp_hack = 0;
3667 gen_pop_update(s);
3668 }
3669 break;
3670 case 0xc8: /* enter */
3671 {
3672 /* XXX: long mode support */
3673 int level;
3674 val = lduw_code(s->pc);
3675 s->pc += 2;
3676 level = ldub_code(s->pc++);
3677 gen_enter(s, val, level);
3678 }
3679 break;
3680 case 0xc9: /* leave */
3681 /* XXX: exception not precise (ESP is updated before potential exception) */
3682 /* XXX: may be invalid for 16 bit in long mode */
3683 if (CODE64(s)) {
3684 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3685 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3686 } else if (s->ss32) {
3687 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3688 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3689 } else {
3690 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3691 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3692 }
3693 gen_pop_T0(s);
3694 if (CODE64(s)) {
3695 ot = dflag ? OT_QUAD : OT_WORD;
3696 } else {
3697 ot = dflag + OT_WORD;
3698 }
3699 gen_op_mov_reg_T0[ot][R_EBP]();
3700 gen_pop_update(s);
3701 break;
3702 case 0x06: /* push es */
3703 case 0x0e: /* push cs */
3704 case 0x16: /* push ss */
3705 case 0x1e: /* push ds */
3706 if (CODE64(s))
3707 goto illegal_op;
3708 gen_op_movl_T0_seg(b >> 3);
3709 gen_push_T0(s);
3710 break;
3711 case 0x1a0: /* push fs */
3712 case 0x1a8: /* push gs */
3713 gen_op_movl_T0_seg((b >> 3) & 7);
3714 gen_push_T0(s);
3715 break;
3716 case 0x07: /* pop es */
3717 case 0x17: /* pop ss */
3718 case 0x1f: /* pop ds */
3719 if (CODE64(s))
3720 goto illegal_op;
3721 reg = b >> 3;
3722 gen_pop_T0(s);
3723 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3724 gen_pop_update(s);
3725 if (reg == R_SS) {
3726 /* if reg == SS, inhibit interrupts/trace. */
3727 /* If several instructions disable interrupts, only the
3728 _first_ does it */
3729 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3730 gen_op_set_inhibit_irq();
3731 s->tf = 0;
3732 }
3733 if (s->is_jmp) {
3734 gen_jmp_im(s->pc - s->cs_base);
3735 gen_eob(s);
3736 }
3737 break;
3738 case 0x1a1: /* pop fs */
3739 case 0x1a9: /* pop gs */
3740 gen_pop_T0(s);
3741 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3742 gen_pop_update(s);
3743 if (s->is_jmp) {
3744 gen_jmp_im(s->pc - s->cs_base);
3745 gen_eob(s);
3746 }
3747 break;
3748
3749 /**************************/
3750 /* mov */
3751 case 0x88:
3752 case 0x89: /* mov Gv, Ev */
3753 if ((b & 1) == 0)
3754 ot = OT_BYTE;
3755 else
3756 ot = dflag + OT_WORD;
3757 modrm = ldub_code(s->pc++);
3758 reg = ((modrm >> 3) & 7) | rex_r;
3759
3760 /* generate a generic store */
3761 gen_ldst_modrm(s, modrm, ot, reg, 1);
3762 break;
3763 case 0xc6:
3764 case 0xc7: /* mov Ev, Iv */
3765 if ((b & 1) == 0)
3766 ot = OT_BYTE;
3767 else
3768 ot = dflag + OT_WORD;
3769 modrm = ldub_code(s->pc++);
3770 mod = (modrm >> 6) & 3;
3771 if (mod != 3) {
3772 s->rip_offset = insn_const_size(ot);
3773 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3774 }
3775 val = insn_get(s, ot);
3776 gen_op_movl_T0_im(val);
3777 if (mod != 3)
3778 gen_op_st_T0_A0[ot + s->mem_index]();
3779 else
3780 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3781 break;
3782 case 0x8a:
3783 case 0x8b: /* mov Ev, Gv */
3784 if ((b & 1) == 0)
3785 ot = OT_BYTE;
3786 else
3787 ot = OT_WORD + dflag;
3788 modrm = ldub_code(s->pc++);
3789 reg = ((modrm >> 3) & 7) | rex_r;
3790
3791 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3792 gen_op_mov_reg_T0[ot][reg]();
3793 break;
3794 case 0x8e: /* mov seg, Gv */
3795 modrm = ldub_code(s->pc++);
3796 reg = (modrm >> 3) & 7;
3797 if (reg >= 6 || reg == R_CS)
3798 goto illegal_op;
3799 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3800 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3801 if (reg == R_SS) {
3802 /* if reg == SS, inhibit interrupts/trace */
3803 /* If several instructions disable interrupts, only the
3804 _first_ does it */
3805 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3806 gen_op_set_inhibit_irq();
3807 s->tf = 0;
3808 }
3809 if (s->is_jmp) {
3810 gen_jmp_im(s->pc - s->cs_base);
3811 gen_eob(s);
3812 }
3813 break;
3814 case 0x8c: /* mov Gv, seg */
3815 modrm = ldub_code(s->pc++);
3816 reg = (modrm >> 3) & 7;
3817 mod = (modrm >> 6) & 3;
3818 if (reg >= 6)
3819 goto illegal_op;
3820 gen_op_movl_T0_seg(reg);
3821 if (mod == 3)
3822 ot = OT_WORD + dflag;
3823 else
3824 ot = OT_WORD;
3825 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3826 break;
3827
3828 case 0x1b6: /* movzbS Gv, Eb */
3829 case 0x1b7: /* movzwS Gv, Eb */
3830 case 0x1be: /* movsbS Gv, Eb */
3831 case 0x1bf: /* movswS Gv, Eb */
3832 {
3833 int d_ot;
3834 /* d_ot is the size of destination */
3835 d_ot = dflag + OT_WORD;
3836 /* ot is the size of source */
3837 ot = (b & 1) + OT_BYTE;
3838 modrm = ldub_code(s->pc++);
3839 reg = ((modrm >> 3) & 7) | rex_r;
3840 mod = (modrm >> 6) & 3;
3841 rm = (modrm & 7) | REX_B(s);
3842
3843 if (mod == 3) {
3844 gen_op_mov_TN_reg[ot][0][rm]();
3845 switch(ot | (b & 8)) {
3846 case OT_BYTE:
3847 gen_op_movzbl_T0_T0();
3848 break;
3849 case OT_BYTE | 8:
3850 gen_op_movsbl_T0_T0();
3851 break;
3852 case OT_WORD:
3853 gen_op_movzwl_T0_T0();
3854 break;
3855 default:
3856 case OT_WORD | 8:
3857 gen_op_movswl_T0_T0();
3858 break;
3859 }
3860 gen_op_mov_reg_T0[d_ot][reg]();
3861 } else {
3862 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3863 if (b & 8) {
3864 gen_op_lds_T0_A0[ot + s->mem_index]();
3865 } else {
3866 gen_op_ldu_T0_A0[ot + s->mem_index]();
3867 }
3868 gen_op_mov_reg_T0[d_ot][reg]();
3869 }
3870 }
3871 break;
3872
3873 case 0x8d: /* lea */
3874 ot = dflag + OT_WORD;
3875 modrm = ldub_code(s->pc++);
3876 mod = (modrm >> 6) & 3;
3877 if (mod == 3)
3878 goto illegal_op;
3879 reg = ((modrm >> 3) & 7) | rex_r;
3880 /* we must ensure that no segment is added */
3881 s->override = -1;
3882 val = s->addseg;
3883 s->addseg = 0;
3884 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3885 s->addseg = val;
3886 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3887 break;
3888
3889 case 0xa0: /* mov EAX, Ov */
3890 case 0xa1:
3891 case 0xa2: /* mov Ov, EAX */
3892 case 0xa3:
3893 {
3894 target_ulong offset_addr;
3895
3896 if ((b & 1) == 0)
3897 ot = OT_BYTE;
3898 else
3899 ot = dflag + OT_WORD;
3900 #ifdef TARGET_X86_64
3901 if (CODE64(s)) {
3902 offset_addr = ldq_code(s->pc);
3903 s->pc += 8;
3904 if (offset_addr == (int32_t)offset_addr)
3905 gen_op_movq_A0_im(offset_addr);
3906 else
3907 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3908 } else
3909 #endif
3910 {
3911 if (s->aflag) {
3912 offset_addr = insn_get(s, OT_LONG);
3913 } else {
3914 offset_addr = insn_get(s, OT_WORD);
3915 }
3916 gen_op_movl_A0_im(offset_addr);
3917 }
3918 gen_add_A0_ds_seg(s);
3919 if ((b & 2) == 0) {
3920 gen_op_ld_T0_A0[ot + s->mem_index]();
3921 gen_op_mov_reg_T0[ot][R_EAX]();
3922 } else {
3923 gen_op_mov_TN_reg[ot][0][R_EAX]();
3924 gen_op_st_T0_A0[ot + s->mem_index]();
3925 }
3926 }
3927 break;
3928 case 0xd7: /* xlat */
3929 #ifdef TARGET_X86_64
3930 if (CODE64(s)) {
3931 gen_op_movq_A0_reg[R_EBX]();
3932 gen_op_addq_A0_AL();
3933 } else
3934 #endif
3935 {
3936 gen_op_movl_A0_reg[R_EBX]();
3937 gen_op_addl_A0_AL();
3938 if (s->aflag == 0)
3939 gen_op_andl_A0_ffff();
3940 }
3941 gen_add_A0_ds_seg(s);
3942 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3943 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3944 break;
3945 case 0xb0 ... 0xb7: /* mov R, Ib */
3946 val = insn_get(s, OT_BYTE);
3947 gen_op_movl_T0_im(val);
3948 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3949 break;
3950 case 0xb8 ... 0xbf: /* mov R, Iv */
3951 #ifdef TARGET_X86_64
3952 if (dflag == 2) {
3953 uint64_t tmp;
3954 /* 64 bit case */
3955 tmp = ldq_code(s->pc);
3956 s->pc += 8;
3957 reg = (b & 7) | REX_B(s);
3958 gen_movtl_T0_im(tmp);
3959 gen_op_mov_reg_T0[OT_QUAD][reg]();
3960 } else
3961 #endif
3962 {
3963 ot = dflag ? OT_LONG : OT_WORD;
3964 val = insn_get(s, ot);
3965 reg = (b & 7) | REX_B(s);
3966 gen_op_movl_T0_im(val);
3967 gen_op_mov_reg_T0[ot][reg]();
3968 }
3969 break;
3970
3971 case 0x91 ... 0x97: /* xchg R, EAX */
3972 ot = dflag + OT_WORD;
3973 reg = (b & 7) | REX_B(s);
3974 rm = R_EAX;
3975 goto do_xchg_reg;
3976 case 0x86:
3977 case 0x87: /* xchg Ev, Gv */
3978 if ((b & 1) == 0)
3979 ot = OT_BYTE;
3980 else
3981 ot = dflag + OT_WORD;
3982 modrm = ldub_code(s->pc++);
3983 reg = ((modrm >> 3) & 7) | rex_r;
3984 mod = (modrm >> 6) & 3;
3985 if (mod == 3) {
3986 rm = (modrm & 7) | REX_B(s);
3987 do_xchg_reg:
3988 gen_op_mov_TN_reg[ot][0][reg]();
3989 gen_op_mov_TN_reg[ot][1][rm]();
3990 gen_op_mov_reg_T0[ot][rm]();
3991 gen_op_mov_reg_T1[ot][reg]();
3992 } else {
3993 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3994 gen_op_mov_TN_reg[ot][0][reg]();
3995 /* for xchg, lock is implicit */
3996 if (!(prefixes & PREFIX_LOCK))
3997 gen_op_lock();
3998 gen_op_ld_T1_A0[ot + s->mem_index]();
3999 gen_op_st_T0_A0[ot + s->mem_index]();
4000 if (!(prefixes & PREFIX_LOCK))
4001 gen_op_unlock();
4002 gen_op_mov_reg_T1[ot][reg]();
4003 }
4004 break;
4005 case 0xc4: /* les Gv */
4006 if (CODE64(s))
4007 goto illegal_op;
4008 op = R_ES;
4009 goto do_lxx;
4010 case 0xc5: /* lds Gv */
4011 if (CODE64(s))
4012 goto illegal_op;
4013 op = R_DS;
4014 goto do_lxx;
4015 case 0x1b2: /* lss Gv */
4016 op = R_SS;
4017 goto do_lxx;
4018 case 0x1b4: /* lfs Gv */
4019 op = R_FS;
4020 goto do_lxx;
4021 case 0x1b5: /* lgs Gv */
4022 op = R_GS;
4023 do_lxx:
4024 ot = dflag ? OT_LONG : OT_WORD;
4025 modrm = ldub_code(s->pc++);
4026 reg = ((modrm >> 3) & 7) | rex_r;
4027 mod = (modrm >> 6) & 3;
4028 if (mod == 3)
4029 goto illegal_op;
4030 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4031 gen_op_ld_T1_A0[ot + s->mem_index]();
4032 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
4033 /* load the segment first to handle exceptions properly */
4034 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4035 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4036 /* then put the data */
4037 gen_op_mov_reg_T1[ot][reg]();
4038 if (s->is_jmp) {
4039 gen_jmp_im(s->pc - s->cs_base);
4040 gen_eob(s);
4041 }
4042 break;
4043
4044 /************************/
4045 /* shifts */
4046 case 0xc0:
4047 case 0xc1:
4048 /* shift Ev,Ib */
4049 shift = 2;
4050 grp2:
4051 {
4052 if ((b & 1) == 0)
4053 ot = OT_BYTE;
4054 else
4055 ot = dflag + OT_WORD;
4056
4057 modrm = ldub_code(s->pc++);
4058 mod = (modrm >> 6) & 3;
4059 op = (modrm >> 3) & 7;
4060
4061 if (mod != 3) {
4062 if (shift == 2) {
4063 s->rip_offset = 1;
4064 }
4065 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4066 opreg = OR_TMP0;
4067 } else {
4068 opreg = (modrm & 7) | REX_B(s);
4069 }
4070
4071 /* simpler op */
4072 if (shift == 0) {
4073 gen_shift(s, op, ot, opreg, OR_ECX);
4074 } else {
4075 if (shift == 2) {
4076 shift = ldub_code(s->pc++);
4077 }
4078 gen_shifti(s, op, ot, opreg, shift);
4079 }
4080 }
4081 break;
4082 case 0xd0:
4083 case 0xd1:
4084 /* shift Ev,1 */
4085 shift = 1;
4086 goto grp2;
4087 case 0xd2:
4088 case 0xd3:
4089 /* shift Ev,cl */
4090 shift = 0;
4091 goto grp2;
4092
4093 case 0x1a4: /* shld imm */
4094 op = 0;
4095 shift = 1;
4096 goto do_shiftd;
4097 case 0x1a5: /* shld cl */
4098 op = 0;
4099 shift = 0;
4100 goto do_shiftd;
4101 case 0x1ac: /* shrd imm */
4102 op = 1;
4103 shift = 1;
4104 goto do_shiftd;
4105 case 0x1ad: /* shrd cl */
4106 op = 1;
4107 shift = 0;
4108 do_shiftd:
4109 ot = dflag + OT_WORD;
4110 modrm = ldub_code(s->pc++);
4111 mod = (modrm >> 6) & 3;
4112 rm = (modrm & 7) | REX_B(s);
4113 reg = ((modrm >> 3) & 7) | rex_r;
4114
4115 if (mod != 3) {
4116 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4117 gen_op_ld_T0_A0[ot + s->mem_index]();
4118 } else {
4119 gen_op_mov_TN_reg[ot][0][rm]();
4120 }
4121 gen_op_mov_TN_reg[ot][1][reg]();
4122
4123 if (shift) {
4124 val = ldub_code(s->pc++);
4125 if (ot == OT_QUAD)
4126 val &= 0x3f;
4127 else
4128 val &= 0x1f;
4129 if (val) {
4130 if (mod == 3)
4131 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4132 else
4133 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4134 if (op == 0 && ot != OT_WORD)
4135 s->cc_op = CC_OP_SHLB + ot;
4136 else
4137 s->cc_op = CC_OP_SARB + ot;
4138 }
4139 } else {
4140 if (s->cc_op != CC_OP_DYNAMIC)
4141 gen_op_set_cc_op(s->cc_op);
4142 if (mod == 3)
4143 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4144 else
4145 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4146 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4147 }
4148 if (mod == 3) {
4149 gen_op_mov_reg_T0[ot][rm]();
4150 }
4151 break;
4152
4153 /************************/
4154 /* floats */
4155 case 0xd8 ... 0xdf:
4156 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4157 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4158 /* XXX: what to do if illegal op ? */
4159 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4160 break;
4161 }
4162 modrm = ldub_code(s->pc++);
4163 mod = (modrm >> 6) & 3;
4164 rm = modrm & 7;
4165 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4166 if (mod != 3) {
4167 /* memory op */
4168 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4169 switch(op) {
4170 case 0x00 ... 0x07: /* fxxxs */
4171 case 0x10 ... 0x17: /* fixxxl */
4172 case 0x20 ... 0x27: /* fxxxl */
4173 case 0x30 ... 0x37: /* fixxx */
4174 {
4175 int op1;
4176 op1 = op & 7;
4177
4178 switch(op >> 4) {
4179 case 0:
4180 gen_op_flds_FT0_A0();
4181 break;
4182 case 1:
4183 gen_op_fildl_FT0_A0();
4184 break;
4185 case 2:
4186 gen_op_fldl_FT0_A0();
4187 break;
4188 case 3:
4189 default:
4190 gen_op_fild_FT0_A0();
4191 break;
4192 }
4193
4194 gen_op_fp_arith_ST0_FT0[op1]();
4195 if (op1 == 3) {
4196 /* fcomp needs pop */
4197 gen_op_fpop();
4198 }
4199 }
4200 break;
4201 case 0x08: /* flds */
4202 case 0x0a: /* fsts */
4203 case 0x0b: /* fstps */
4204 case 0x18: /* fildl */
4205 case 0x1a: /* fistl */
4206 case 0x1b: /* fistpl */
4207 case 0x28: /* fldl */
4208 case 0x2a: /* fstl */
4209 case 0x2b: /* fstpl */
4210 case 0x38: /* filds */
4211 case 0x3a: /* fists */
4212 case 0x3b: /* fistps */
4213
4214 switch(op & 7) {
4215 case 0:
4216 switch(op >> 4) {
4217 case 0:
4218 gen_op_flds_ST0_A0();
4219 break;
4220 case 1:
4221 gen_op_fildl_ST0_A0();
4222 break;
4223 case 2:
4224 gen_op_fldl_ST0_A0();
4225 break;
4226 case 3:
4227 default:
4228 gen_op_fild_ST0_A0();
4229 break;
4230 }
4231 break;
4232 default:
4233 switch(op >> 4) {
4234 case 0:
4235 gen_op_fsts_ST0_A0();
4236 break;
4237 case 1:
4238 gen_op_fistl_ST0_A0();
4239 break;
4240 case 2:
4241 gen_op_fstl_ST0_A0();
4242 break;
4243 case 3:
4244 default:
4245 gen_op_fist_ST0_A0();
4246 break;
4247 }
4248 if ((op & 7) == 3)
4249 gen_op_fpop();
4250 break;
4251 }
4252 break;
4253 case 0x0c: /* fldenv mem */
4254 gen_op_fldenv_A0(s->dflag);
4255 break;
4256 case 0x0d: /* fldcw mem */
4257 gen_op_fldcw_A0();
4258 break;
4259 case 0x0e: /* fnstenv mem */
4260 gen_op_fnstenv_A0(s->dflag);
4261 break;
4262 case 0x0f: /* fnstcw mem */
4263 gen_op_fnstcw_A0();
4264 break;
4265 case 0x1d: /* fldt mem */
4266 gen_op_fldt_ST0_A0();
4267 break;
4268 case 0x1f: /* fstpt mem */
4269 gen_op_fstt_ST0_A0();
4270 gen_op_fpop();
4271 break;
4272 case 0x2c: /* frstor mem */
4273 gen_op_frstor_A0(s->dflag);
4274 break;
4275 case 0x2e: /* fnsave mem */
4276 gen_op_fnsave_A0(s->dflag);
4277 break;
4278 case 0x2f: /* fnstsw mem */
4279 gen_op_fnstsw_A0();
4280 break;
4281 case 0x3c: /* fbld */
4282 gen_op_fbld_ST0_A0();
4283 break;
4284 case 0x3e: /* fbstp */
4285 gen_op_fbst_ST0_A0();
4286 gen_op_fpop();
4287 break;
4288 case 0x3d: /* fildll */
4289 gen_op_fildll_ST0_A0();
4290 break;
4291 case 0x3f: /* fistpll */
4292 gen_op_fistll_ST0_A0();
4293 gen_op_fpop();
4294 break;
4295 default:
4296 goto illegal_op;
4297 }
4298 } else {
4299 /* register float ops */
4300 opreg = rm;
4301
4302 switch(op) {
4303 case 0x08: /* fld sti */
4304 gen_op_fpush();
4305 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4306 break;
4307 case 0x09: /* fxchg sti */
4308 case 0x29: /* fxchg4 sti, undocumented op */
4309 case 0x39: /* fxchg7 sti, undocumented op */
4310 gen_op_fxchg_ST0_STN(opreg);
4311 break;
4312 case 0x0a: /* grp d9/2 */
4313 switch(rm) {
4314 case 0: /* fnop */
4315 /* check exceptions (FreeBSD FPU probe) */
4316 if (s->cc_op != CC_OP_DYNAMIC)
4317 gen_op_set_cc_op(s->cc_op);
4318 gen_jmp_im(pc_start - s->cs_base);
4319 gen_op_fwait();
4320 break;
4321 default:
4322 goto illegal_op;
4323 }
4324 break;
4325 case 0x0c: /* grp d9/4 */
4326 switch(rm) {
4327 case 0: /* fchs */
4328 gen_op_fchs_ST0();
4329 break;
4330 case 1: /* fabs */
4331 gen_op_fabs_ST0();
4332 break;
4333 case 4: /* ftst */
4334 gen_op_fldz_FT0();
4335 gen_op_fcom_ST0_FT0();
4336 break;
4337 case 5: /* fxam */
4338 gen_op_fxam_ST0();
4339 break;
4340 default:
4341 goto illegal_op;
4342 }
4343 break;
4344 case 0x0d: /* grp d9/5 */
4345 {
4346 switch(rm) {
4347 case 0:
4348 gen_op_fpush();
4349 gen_op_fld1_ST0();
4350 break;
4351 case 1:
4352 gen_op_fpush();
4353 gen_op_fldl2t_ST0();
4354 break;
4355 case 2:
4356 gen_op_fpush();
4357 gen_op_fldl2e_ST0();
4358 break;
4359 case 3:
4360 gen_op_fpush();
4361 gen_op_fldpi_ST0();
4362 break;
4363 case 4:
4364 gen_op_fpush();
4365 gen_op_fldlg2_ST0();
4366 break;
4367 case 5:
4368 gen_op_fpush();
4369 gen_op_fldln2_ST0();
4370 break;
4371 case 6:
4372 gen_op_fpush();
4373 gen_op_fldz_ST0();
4374 break;
4375 default:
4376 goto illegal_op;
4377 }
4378 }
4379 break;
4380 case 0x0e: /* grp d9/6 */
4381 switch(rm) {
4382 case 0: /* f2xm1 */
4383 gen_op_f2xm1();
4384 break;
4385 case 1: /* fyl2x */
4386 gen_op_fyl2x();
4387 break;
4388 case 2: /* fptan */
4389 gen_op_fptan();
4390 break;
4391 case 3: /* fpatan */
4392 gen_op_fpatan();
4393 break;
4394 case 4: /* fxtract */
4395 gen_op_fxtract();
4396 break;
4397 case 5: /* fprem1 */
4398 gen_op_fprem1();
4399 break;
4400 case 6: /* fdecstp */
4401 gen_op_fdecstp();
4402 break;
4403 default:
4404 case 7: /* fincstp */
4405 gen_op_fincstp();
4406 break;
4407 }
4408 break;
4409 case 0x0f: /* grp d9/7 */
4410 switch(rm) {
4411 case 0: /* fprem */
4412 gen_op_fprem();
4413 break;
4414 case 1: /* fyl2xp1 */
4415 gen_op_fyl2xp1();
4416 break;
4417 case 2: /* fsqrt */
4418 gen_op_fsqrt();
4419 break;
4420 case 3: /* fsincos */
4421 gen_op_fsincos();
4422 break;
4423 case 5: /* fscale */
4424 gen_op_fscale();
4425 break;
4426 case 4: /* frndint */
4427 gen_op_frndint();
4428 break;
4429 case 6: /* fsin */
4430 gen_op_fsin();
4431 break;
4432 default:
4433 case 7: /* fcos */
4434 gen_op_fcos();
4435 break;
4436 }
4437 break;
4438 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4439 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4440 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4441 {
4442 int op1;
4443
4444 op1 = op & 7;
4445 if (op >= 0x20) {
4446 gen_op_fp_arith_STN_ST0[op1](opreg);
4447 if (op >= 0x30)
4448 gen_op_fpop();
4449 } else {
4450 gen_op_fmov_FT0_STN(opreg);
4451 gen_op_fp_arith_ST0_FT0[op1]();
4452 }
4453 }
4454 break;
4455 case 0x02: /* fcom */
4456 case 0x22: /* fcom2, undocumented op */
4457 gen_op_fmov_FT0_STN(opreg);
4458 gen_op_fcom_ST0_FT0();
4459 break;
4460 case 0x03: /* fcomp */
4461 case 0x23: /* fcomp3, undocumented op */
4462 case 0x32: /* fcomp5, undocumented op */
4463 gen_op_fmov_FT0_STN(opreg);
4464 gen_op_fcom_ST0_FT0();
4465 gen_op_fpop();
4466 break;
4467 case 0x15: /* da/5 */
4468 switch(rm) {
4469 case 1: /* fucompp */
4470 gen_op_fmov_FT0_STN(1);
4471 gen_op_fucom_ST0_FT0();
4472 gen_op_fpop();
4473 gen_op_fpop();
4474 break;
4475 default:
4476 goto illegal_op;
4477 }
4478 break;
4479 case 0x1c:
4480 switch(rm) {
4481 case 0: /* feni (287 only, just do nop here) */
4482 break;
4483 case 1: /* fdisi (287 only, just do nop here) */
4484 break;
4485 case 2: /* fclex */
4486 gen_op_fclex();
4487 break;
4488 case 3: /* fninit */
4489 gen_op_fninit();
4490 break;
4491 case 4: /* fsetpm (287 only, just do nop here) */
4492 break;
4493 default:
4494 goto illegal_op;
4495 }
4496 break;
4497 case 0x1d: /* fucomi */
4498 if (s->cc_op != CC_OP_DYNAMIC)
4499 gen_op_set_cc_op(s->cc_op);
4500 gen_op_fmov_FT0_STN(opreg);
4501 gen_op_fucomi_ST0_FT0();
4502 s->cc_op = CC_OP_EFLAGS;
4503 break;
4504 case 0x1e: /* fcomi */
4505 if (s->cc_op != CC_OP_DYNAMIC)
4506 gen_op_set_cc_op(s->cc_op);
4507 gen_op_fmov_FT0_STN(opreg);
4508 gen_op_fcomi_ST0_FT0();
4509 s->cc_op = CC_OP_EFLAGS;
4510 break;
4511 case 0x28: /* ffree sti */
4512 gen_op_ffree_STN(opreg);
4513 break;
4514 case 0x2a: /* fst sti */
4515 gen_op_fmov_STN_ST0(opreg);
4516 break;
4517 case 0x2b: /* fstp sti */
4518 case 0x0b: /* fstp1 sti, undocumented op */
4519 case 0x3a: /* fstp8 sti, undocumented op */
4520 case 0x3b: /* fstp9 sti, undocumented op */
4521 gen_op_fmov_STN_ST0(opreg);
4522 gen_op_fpop();
4523 break;
4524 case 0x2c: /* fucom st(i) */
4525 gen_op_fmov_FT0_STN(opreg);
4526 gen_op_fucom_ST0_FT0();
4527 break;
4528 case 0x2d: /* fucomp st(i) */
4529 gen_op_fmov_FT0_STN(opreg);
4530 gen_op_fucom_ST0_FT0();
4531 gen_op_fpop();
4532 break;
4533 case 0x33: /* de/3 */
4534 switch(rm) {
4535 case 1: /* fcompp */
4536 gen_op_fmov_FT0_STN(1);
4537 gen_op_fcom_ST0_FT0();
4538 gen_op_fpop();
4539 gen_op_fpop();
4540 break;
4541 default:
4542 goto illegal_op;
4543 }
4544 break;
4545 case 0x38: /* ffreep sti, undocumented op */
4546 gen_op_ffree_STN(opreg);
4547 gen_op_fpop();
4548 break;
4549 case 0x3c: /* df/4 */
4550 switch(rm) {
4551 case 0:
4552 gen_op_fnstsw_EAX();
4553 break;
4554 default:
4555 goto illegal_op;
4556 }
4557 break;
4558 case 0x3d: /* fucomip */
4559 if (s->cc_op != CC_OP_DYNAMIC)
4560 gen_op_set_cc_op(s->cc_op);
4561 gen_op_fmov_FT0_STN(opreg);
4562 gen_op_fucomi_ST0_FT0();
4563 gen_op_fpop();
4564 s->cc_op = CC_OP_EFLAGS;
4565 break;
4566 case 0x3e: /* fcomip */
4567 if (s->cc_op != CC_OP_DYNAMIC)
4568 gen_op_set_cc_op(s->cc_op);
4569 gen_op_fmov_FT0_STN(opreg);
4570 gen_op_fcomi_ST0_FT0();
4571 gen_op_fpop();
4572 s->cc_op = CC_OP_EFLAGS;
4573 break;
4574 case 0x10 ... 0x13: /* fcmovxx */
4575 case 0x18 ... 0x1b:
4576 {
4577 int op1;
4578 const static uint8_t fcmov_cc[8] = {
4579 (JCC_B << 1),
4580 (JCC_Z << 1),
4581 (JCC_BE << 1),
4582 (JCC_P << 1),
4583 };
4584 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4585 gen_setcc(s, op1);
4586 gen_op_fcmov_ST0_STN_T0(opreg);
4587 }
4588 break;
4589 default:
4590 goto illegal_op;
4591 }
4592 }
4593 #ifdef USE_CODE_COPY
4594 s->tb->cflags |= CF_TB_FP_USED;
4595 #endif
4596 break;
4597 /************************/
4598 /* string ops */
4599
4600 case 0xa4: /* movsS */
4601 case 0xa5:
4602 if ((b & 1) == 0)
4603 ot = OT_BYTE;
4604 else
4605 ot = dflag + OT_WORD;
4606
4607 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4608 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4609 } else {
4610 gen_movs(s, ot);
4611 }
4612 break;
4613
4614 case 0xaa: /* stosS */
4615 case 0xab:
4616 if ((b & 1) == 0)
4617 ot = OT_BYTE;
4618 else
4619 ot = dflag + OT_WORD;
4620
4621 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4622 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4623 } else {
4624 gen_stos(s, ot);
4625 }
4626 break;
4627 case 0xac: /* lodsS */
4628 case 0xad:
4629 if ((b & 1) == 0)
4630 ot = OT_BYTE;
4631 else
4632 ot = dflag + OT_WORD;
4633 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4634 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4635 } else {
4636 gen_lods(s, ot);
4637 }
4638 break;
4639 case 0xae: /* scasS */
4640 case 0xaf:
4641 if ((b & 1) == 0)
4642 ot = OT_BYTE;
4643 else
4644 ot = dflag + OT_WORD;
4645 if (prefixes & PREFIX_REPNZ) {
4646 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4647 } else if (prefixes & PREFIX_REPZ) {
4648 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4649 } else {
4650 gen_scas(s, ot);
4651 s->cc_op = CC_OP_SUBB + ot;
4652 }
4653 break;
4654
4655 case 0xa6: /* cmpsS */
4656 case 0xa7:
4657 if ((b & 1) == 0)
4658 ot = OT_BYTE;
4659 else
4660 ot = dflag + OT_WORD;
4661 if (prefixes & PREFIX_REPNZ) {
4662 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4663 } else if (prefixes & PREFIX_REPZ) {
4664 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4665 } else {
4666 gen_cmps(s, ot);
4667 s->cc_op = CC_OP_SUBB + ot;
4668 }
4669 break;
4670 case 0x6c: /* insS */
4671 case 0x6d:
4672 if ((b & 1) == 0)
4673 ot = OT_BYTE;
4674 else
4675 ot = dflag ? OT_LONG : OT_WORD;
4676 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4677 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4678 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4679 } else {
4680 gen_ins(s, ot);
4681 }
4682 break;
4683 case 0x6e: /* outsS */
4684 case 0x6f:
4685 if ((b & 1) == 0)
4686 ot = OT_BYTE;
4687 else
4688 ot = dflag ? OT_LONG : OT_WORD;
4689 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4690 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4691 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4692 } else {
4693 gen_outs(s, ot);
4694 }
4695 break;
4696
4697 /************************/
4698 /* port I/O */
4699 case 0xe4:
4700 case 0xe5:
4701 if ((b & 1) == 0)
4702 ot = OT_BYTE;
4703 else
4704 ot = dflag ? OT_LONG : OT_WORD;
4705 val = ldub_code(s->pc++);
4706 gen_op_movl_T0_im(val);
4707 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4708 gen_op_in[ot]();
4709 gen_op_mov_reg_T1[ot][R_EAX]();
4710 break;
4711 case 0xe6:
4712 case 0xe7:
4713 if ((b & 1) == 0)
4714 ot = OT_BYTE;
4715 else
4716 ot = dflag ? OT_LONG : OT_WORD;
4717 val = ldub_code(s->pc++);
4718 gen_op_movl_T0_im(val);
4719 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4720 gen_op_mov_TN_reg[ot][1][R_EAX]();
4721 gen_op_out[ot]();
4722 break;
4723 case 0xec:
4724 case 0xed:
4725 if ((b & 1) == 0)
4726 ot = OT_BYTE;
4727 else
4728 ot = dflag ? OT_LONG : OT_WORD;
4729 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4730 gen_op_andl_T0_ffff();
4731 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4732 gen_op_in[ot]();
4733 gen_op_mov_reg_T1[ot][R_EAX]();
4734 break;
4735 case 0xee:
4736 case 0xef:
4737 if ((b & 1) == 0)
4738 ot = OT_BYTE;
4739 else
4740 ot = dflag ? OT_LONG : OT_WORD;
4741 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4742 gen_op_andl_T0_ffff();
4743 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4744 gen_op_mov_TN_reg[ot][1][R_EAX]();
4745 gen_op_out[ot]();
4746 break;
4747
4748 /************************/
4749 /* control */
4750 case 0xc2: /* ret im */
4751 val = ldsw_code(s->pc);
4752 s->pc += 2;
4753 gen_pop_T0(s);
4754 gen_stack_update(s, val + (2 << s->dflag));
4755 if (s->dflag == 0)
4756 gen_op_andl_T0_ffff();
4757 gen_op_jmp_T0();
4758 gen_eob(s);
4759 break;
4760 case 0xc3: /* ret */
4761 gen_pop_T0(s);
4762 gen_pop_update(s);
4763 if (s->dflag == 0)
4764 gen_op_andl_T0_ffff();
4765 gen_op_jmp_T0();
4766 gen_eob(s);
4767 break;
4768 case 0xca: /* lret im */
4769 val = ldsw_code(s->pc);
4770 s->pc += 2;
4771 do_lret:
4772 if (s->pe && !s->vm86) {
4773 if (s->cc_op != CC_OP_DYNAMIC)
4774 gen_op_set_cc_op(s->cc_op);
4775 gen_jmp_im(pc_start - s->cs_base);
4776 gen_op_lret_protected(s->dflag, val);
4777 } else {
4778 gen_stack_A0(s);
4779 /* pop offset */
4780 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4781 if (s->dflag == 0)
4782 gen_op_andl_T0_ffff();
4783 /* NOTE: keeping EIP updated is not a problem in case of
4784 exception */
4785 gen_op_jmp_T0();
4786 /* pop selector */
4787 gen_op_addl_A0_im(2 << s->dflag);
4788 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4789 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4790 /* add stack offset */
4791 gen_stack_update(s, val + (4 << s->dflag));
4792 }
4793 gen_eob(s);
4794 break;
4795 case 0xcb: /* lret */
4796 val = 0;
4797 goto do_lret;
4798 case 0xcf: /* iret */
4799 if (!s->pe) {
4800 /* real mode */
4801 gen_op_iret_real(s->dflag);
4802 s->cc_op = CC_OP_EFLAGS;
4803 } else if (s->vm86) {
4804 if (s->iopl != 3) {
4805 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4806 } else {
4807 gen_op_iret_real(s->dflag);
4808 s->cc_op = CC_OP_EFLAGS;
4809 }
4810 } else {
4811 if (s->cc_op != CC_OP_DYNAMIC)
4812 gen_op_set_cc_op(s->cc_op);
4813 gen_jmp_im(pc_start - s->cs_base);
4814 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4815 s->cc_op = CC_OP_EFLAGS;
4816 }
4817 gen_eob(s);
4818 break;
4819 case 0xe8: /* call im */
4820 {
4821 if (dflag)
4822 tval = (int32_t)insn_get(s, OT_LONG);
4823 else
4824 tval = (int16_t)insn_get(s, OT_WORD);
4825 next_eip = s->pc - s->cs_base;
4826 tval += next_eip;
4827 if (s->dflag == 0)
4828 tval &= 0xffff;
4829 gen_movtl_T0_im(next_eip);
4830 gen_push_T0(s);
4831 gen_jmp(s, tval);
4832 }
4833 break;
4834 case 0x9a: /* lcall im */
4835 {
4836 unsigned int selector, offset;
4837
4838 if (CODE64(s))
4839 goto illegal_op;
4840 ot = dflag ? OT_LONG : OT_WORD;
4841 offset = insn_get(s, ot);
4842 selector = insn_get(s, OT_WORD);
4843
4844 gen_op_movl_T0_im(selector);
4845 gen_op_movl_T1_imu(offset);
4846 }
4847 goto do_lcall;
4848 case 0xe9: /* jmp */
4849 if (dflag)
4850 tval = (int32_t)insn_get(s, OT_LONG);
4851 else
4852 tval = (int16_t)insn_get(s, OT_WORD);
4853 tval += s->pc - s->cs_base;
4854 if (s->dflag == 0)
4855 tval &= 0xffff;
4856 gen_jmp(s, tval);
4857 break;
4858 case 0xea: /* ljmp im */
4859 {
4860 unsigned int selector, offset;
4861
4862 if (CODE64(s))
4863 goto illegal_op;
4864 ot = dflag ? OT_LONG : OT_WORD;
4865 offset = insn_get(s, ot);
4866 selector = insn_get(s, OT_WORD);
4867
4868 gen_op_movl_T0_im(selector);
4869 gen_op_movl_T1_imu(offset);
4870 }
4871 goto do_ljmp;
4872 case 0xeb: /* jmp Jb */
4873 tval = (int8_t)insn_get(s, OT_BYTE);
4874 tval += s->pc - s->cs_base;
4875 if (s->dflag == 0)
4876 tval &= 0xffff;
4877 gen_jmp(s, tval);
4878 break;
4879 case 0x70 ... 0x7f: /* jcc Jb */
4880 tval = (int8_t)insn_get(s, OT_BYTE);
4881 goto do_jcc;
4882 case 0x180 ... 0x18f: /* jcc Jv */
4883 if (dflag) {
4884 tval = (int32_t)insn_get(s, OT_LONG);
4885 } else {
4886 tval = (int16_t)insn_get(s, OT_WORD);
4887 }
4888 do_jcc:
4889 next_eip = s->pc - s->cs_base;
4890 tval += next_eip;
4891 if (s->dflag == 0)
4892 tval &= 0xffff;
4893 gen_jcc(s, b, tval, next_eip);
4894 break;
4895
4896 case 0x190 ... 0x19f: /* setcc Gv */
4897 modrm = ldub_code(s->pc++);
4898 gen_setcc(s, b);
4899 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4900 break;
4901 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4902 ot = dflag + OT_WORD;
4903 modrm = ldub_code(s->pc++);
4904 reg = ((modrm >> 3) & 7) | rex_r;
4905 mod = (modrm >> 6) & 3;
4906 gen_setcc(s, b);
4907 if (mod != 3) {
4908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4909 gen_op_ld_T1_A0[ot + s->mem_index]();
4910 } else {
4911 rm = (modrm & 7) | REX_B(s);
4912 gen_op_mov_TN_reg[ot][1][rm]();
4913 }
4914 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4915 break;
4916
4917 /************************/
4918 /* flags */
4919 case 0x9c: /* pushf */
4920 if (s->vm86 && s->iopl != 3) {
4921 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4922 } else {
4923 if (s->cc_op != CC_OP_DYNAMIC)
4924 gen_op_set_cc_op(s->cc_op);
4925 gen_op_movl_T0_eflags();
4926 gen_push_T0(s);
4927 }
4928 break;
4929 case 0x9d: /* popf */
4930 if (s->vm86 && s->iopl != 3) {
4931 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4932 } else {
4933 gen_pop_T0(s);
4934 if (s->cpl == 0) {
4935 if (s->dflag) {
4936 gen_op_movl_eflags_T0_cpl0();
4937 } else {
4938 gen_op_movw_eflags_T0_cpl0();
4939 }
4940 } else {
4941 if (s->cpl <= s->iopl) {
4942 if (s->dflag) {
4943 gen_op_movl_eflags_T0_io();
4944 } else {
4945 gen_op_movw_eflags_T0_io();
4946 }
4947 } else {
4948 if (s->dflag) {
4949 gen_op_movl_eflags_T0();
4950 } else {
4951 gen_op_movw_eflags_T0();
4952 }
4953 }
4954 }
4955 gen_pop_update(s);
4956 s->cc_op = CC_OP_EFLAGS;
4957 /* abort translation because TF flag may change */
4958 gen_jmp_im(s->pc - s->cs_base);
4959 gen_eob(s);
4960 }
4961 break;
4962 case 0x9e: /* sahf */
4963 if (CODE64(s))
4964 goto illegal_op;
4965 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4966 if (s->cc_op != CC_OP_DYNAMIC)
4967 gen_op_set_cc_op(s->cc_op);
4968 gen_op_movb_eflags_T0();
4969 s->cc_op = CC_OP_EFLAGS;
4970 break;
4971 case 0x9f: /* lahf */
4972 if (CODE64(s))
4973 goto illegal_op;
4974 if (s->cc_op != CC_OP_DYNAMIC)
4975 gen_op_set_cc_op(s->cc_op);
4976 gen_op_movl_T0_eflags();
4977 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
4978 break;
4979 case 0xf5: /* cmc */
4980 if (s->cc_op != CC_OP_DYNAMIC)
4981 gen_op_set_cc_op(s->cc_op);
4982 gen_op_cmc();
4983 s->cc_op = CC_OP_EFLAGS;
4984 break;
4985 case 0xf8: /* clc */
4986 if (s->cc_op != CC_OP_DYNAMIC)
4987 gen_op_set_cc_op(s->cc_op);
4988 gen_op_clc();
4989 s->cc_op = CC_OP_EFLAGS;
4990 break;
4991 case 0xf9: /* stc */
4992 if (s->cc_op != CC_OP_DYNAMIC)
4993 gen_op_set_cc_op(s->cc_op);
4994 gen_op_stc();
4995 s->cc_op = CC_OP_EFLAGS;
4996 break;
4997 case 0xfc: /* cld */
4998 gen_op_cld();
4999 break;
5000 case 0xfd: /* std */
5001 gen_op_std();
5002 break;
5003
5004 /************************/
5005 /* bit operations */
5006 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5007 ot = dflag + OT_WORD;
5008 modrm = ldub_code(s->pc++);
5009 op = ((modrm >> 3) & 7) | rex_r;
5010 mod = (modrm >> 6) & 3;
5011 rm = (modrm & 7) | REX_B(s);
5012 if (mod != 3) {
5013 s->rip_offset = 1;
5014 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5015 gen_op_ld_T0_A0[ot + s->mem_index]();
5016 } else {
5017 gen_op_mov_TN_reg[ot][0][rm]();
5018 }
5019 /* load shift */
5020 val = ldub_code(s->pc++);
5021 gen_op_movl_T1_im(val);
5022 if (op < 4)
5023 goto illegal_op;
5024 op -= 4;
5025 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5026 s->cc_op = CC_OP_SARB + ot;
5027 if (op != 0) {
5028 if (mod != 3)
5029 gen_op_st_T0_A0[ot + s->mem_index]();
5030 else
5031 gen_op_mov_reg_T0[ot][rm]();
5032 gen_op_update_bt_cc();
5033 }
5034 break;
5035 case 0x1a3: /* bt Gv, Ev */
5036 op = 0;
5037 goto do_btx;
5038 case 0x1ab: /* bts */
5039 op = 1;
5040 goto do_btx;
5041 case 0x1b3: /* btr */
5042 op = 2;
5043 goto do_btx;
5044 case 0x1bb: /* btc */
5045 op = 3;
5046 do_btx:
5047 ot = dflag + OT_WORD;
5048 modrm = ldub_code(s->pc++);
5049 reg = ((modrm >> 3) & 7) | rex_r;
5050 mod = (modrm >> 6) & 3;
5051 rm = (modrm & 7) | REX_B(s);
5052 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5053 if (mod != 3) {
5054 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5055 /* specific case: we need to add a displacement */
5056 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5057 gen_op_ld_T0_A0[ot + s->mem_index]();
5058 } else {
5059 gen_op_mov_TN_reg[ot][0][rm]();
5060 }
5061 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5062 s->cc_op = CC_OP_SARB + ot;
5063 if (op != 0) {
5064 if (mod != 3)
5065 gen_op_st_T0_A0[ot + s->mem_index]();
5066 else
5067 gen_op_mov_reg_T0[ot][rm]();
5068 gen_op_update_bt_cc();
5069 }
5070 break;
5071 case 0x1bc: /* bsf */
5072 case 0x1bd: /* bsr */
5073 ot = dflag + OT_WORD;
5074 modrm = ldub_code(s->pc++);
5075 reg = ((modrm >> 3) & 7) | rex_r;
5076 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5077 /* NOTE: in order to handle the 0 case, we must load the
5078 result. It could be optimized with a generated jump */
5079 gen_op_mov_TN_reg[ot][1][reg]();
5080 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5081 gen_op_mov_reg_T1[ot][reg]();
5082 s->cc_op = CC_OP_LOGICB + ot;
5083 break;
5084 /************************/
5085 /* bcd */
5086 case 0x27: /* daa */
5087 if (CODE64(s))
5088 goto illegal_op;
5089 if (s->cc_op != CC_OP_DYNAMIC)
5090 gen_op_set_cc_op(s->cc_op);
5091 gen_op_daa();
5092 s->cc_op = CC_OP_EFLAGS;
5093 break;
5094 case 0x2f: /* das */
5095 if (CODE64(s))
5096 goto illegal_op;
5097 if (s->cc_op != CC_OP_DYNAMIC)
5098 gen_op_set_cc_op(s->cc_op);
5099 gen_op_das();
5100 s->cc_op = CC_OP_EFLAGS;
5101 break;
5102 case 0x37: /* aaa */
5103 if (CODE64(s))
5104 goto illegal_op;
5105 if (s->cc_op != CC_OP_DYNAMIC)
5106 gen_op_set_cc_op(s->cc_op);
5107 gen_op_aaa();
5108 s->cc_op = CC_OP_EFLAGS;
5109 break;
5110 case 0x3f: /* aas */
5111 if (CODE64(s))
5112 goto illegal_op;
5113 if (s->cc_op != CC_OP_DYNAMIC)
5114 gen_op_set_cc_op(s->cc_op);
5115 gen_op_aas();
5116 s->cc_op = CC_OP_EFLAGS;
5117 break;
5118 case 0xd4: /* aam */
5119 if (CODE64(s))
5120 goto illegal_op;
5121 val = ldub_code(s->pc++);
5122 gen_op_aam(val);
5123 s->cc_op = CC_OP_LOGICB;
5124 break;
5125 case 0xd5: /* aad */
5126 if (CODE64(s))
5127 goto illegal_op;
5128 val = ldub_code(s->pc++);
5129 gen_op_aad(val);
5130 s->cc_op = CC_OP_LOGICB;
5131 break;
5132 /************************/
5133 /* misc */
5134 case 0x90: /* nop */
5135 /* XXX: xchg + rex handling */
5136 /* XXX: correct lock test for all insn */
5137 if (prefixes & PREFIX_LOCK)
5138 goto illegal_op;
5139 break;
5140 case 0x9b: /* fwait */
5141 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5142 (HF_MP_MASK | HF_TS_MASK)) {
5143 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5144 } else {
5145 if (s->cc_op != CC_OP_DYNAMIC)
5146 gen_op_set_cc_op(s->cc_op);
5147 gen_jmp_im(pc_start - s->cs_base);
5148 gen_op_fwait();
5149 }
5150 break;
5151 case 0xcc: /* int3 */
5152 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5153 break;
5154 case 0xcd: /* int N */
5155 val = ldub_code(s->pc++);
5156 if (s->vm86 && s->iopl != 3) {
5157 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5158 } else {
5159 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5160 }
5161 break;
5162 case 0xce: /* into */
5163 if (CODE64(s))
5164 goto illegal_op;
5165 if (s->cc_op != CC_OP_DYNAMIC)
5166 gen_op_set_cc_op(s->cc_op);
5167 gen_jmp_im(pc_start - s->cs_base);
5168 gen_op_into(s->pc - pc_start);
5169 break;
5170 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5171 gen_debug(s, pc_start - s->cs_base);
5172 break;
5173 case 0xfa: /* cli */
5174 if (!s->vm86) {
5175 if (s->cpl <= s->iopl) {
5176 gen_op_cli();
5177 } else {
5178 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5179 }
5180 } else {
5181 if (s->iopl == 3) {
5182 gen_op_cli();
5183 } else {
5184 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5185 }
5186 }
5187 break;
5188 case 0xfb: /* sti */
5189 if (!s->vm86) {
5190 if (s->cpl <= s->iopl) {
5191 gen_sti:
5192 gen_op_sti();
5193 /* interruptions are enabled only the first insn after sti */
5194 /* If several instructions disable interrupts, only the
5195 _first_ does it */
5196 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5197 gen_op_set_inhibit_irq();
5198 /* give a chance to handle pending irqs */
5199 gen_jmp_im(s->pc - s->cs_base);
5200 gen_eob(s);
5201 } else {
5202 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5203 }
5204 } else {
5205 if (s->iopl == 3) {
5206 goto gen_sti;
5207 } else {
5208 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5209 }
5210 }
5211 break;
5212 case 0x62: /* bound */
5213 if (CODE64(s))
5214 goto illegal_op;
5215 ot = dflag ? OT_LONG : OT_WORD;
5216 modrm = ldub_code(s->pc++);
5217 reg = (modrm >> 3) & 7;
5218 mod = (modrm >> 6) & 3;
5219 if (mod == 3)
5220 goto illegal_op;
5221 gen_op_mov_TN_reg[ot][0][reg]();
5222 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5223 gen_jmp_im(pc_start - s->cs_base);
5224 if (ot == OT_WORD)
5225 gen_op_boundw();
5226 else
5227 gen_op_boundl();
5228 break;
5229 case 0x1c8 ... 0x1cf: /* bswap reg */
5230 reg = (b & 7) | REX_B(s);
5231 #ifdef TARGET_X86_64
5232 if (dflag == 2) {
5233 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5234 gen_op_bswapq_T0();
5235 gen_op_mov_reg_T0[OT_QUAD][reg]();
5236 } else
5237 #endif
5238 {
5239 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5240 gen_op_bswapl_T0();
5241 gen_op_mov_reg_T0[OT_LONG][reg]();
5242 }
5243 break;
5244 case 0xd6: /* salc */
5245 if (CODE64(s))
5246 goto illegal_op;
5247 if (s->cc_op != CC_OP_DYNAMIC)
5248 gen_op_set_cc_op(s->cc_op);
5249 gen_op_salc();
5250 break;
5251 case 0xe0: /* loopnz */
5252 case 0xe1: /* loopz */
5253 if (s->cc_op != CC_OP_DYNAMIC)
5254 gen_op_set_cc_op(s->cc_op);
5255 /* FALL THRU */
5256 case 0xe2: /* loop */
5257 case 0xe3: /* jecxz */
5258 {
5259 int l1, l2;
5260
5261 tval = (int8_t)insn_get(s, OT_BYTE);
5262 next_eip = s->pc - s->cs_base;
5263 tval += next_eip;
5264 if (s->dflag == 0)
5265 tval &= 0xffff;
5266
5267 l1 = gen_new_label();
5268 l2 = gen_new_label();
5269 b &= 3;
5270 if (b == 3) {
5271 gen_op_jz_ecx[s->aflag](l1);
5272 } else {
5273 gen_op_dec_ECX[s->aflag]();
5274 if (b <= 1)
5275 gen_op_mov_T0_cc();
5276 gen_op_loop[s->aflag][b](l1);
5277 }
5278
5279 gen_jmp_im(next_eip);
5280 gen_op_jmp_label(l2);
5281 gen_set_label(l1);
5282 gen_jmp_im(tval);
5283 gen_set_label(l2);
5284 gen_eob(s);
5285 }
5286 break;
5287 case 0x130: /* wrmsr */
5288 case 0x132: /* rdmsr */
5289 if (s->cpl != 0) {
5290 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5291 } else {
5292 if (b & 2)
5293 gen_op_rdmsr();
5294 else
5295 gen_op_wrmsr();
5296 }
5297 break;
5298 case 0x131: /* rdtsc */
5299 gen_op_rdtsc();
5300 break;
5301 case 0x134: /* sysenter */
5302 if (CODE64(s))
5303 goto illegal_op;
5304 if (!s->pe) {
5305 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5306 } else {
5307 if (s->cc_op != CC_OP_DYNAMIC) {
5308 gen_op_set_cc_op(s->cc_op);
5309 s->cc_op = CC_OP_DYNAMIC;
5310 }
5311 gen_jmp_im(pc_start - s->cs_base);
5312 gen_op_sysenter();
5313 gen_eob(s);
5314 }
5315 break;
5316 case 0x135: /* sysexit */
5317 if (CODE64(s))
5318 goto illegal_op;
5319 if (!s->pe) {
5320 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5321 } else {
5322 if (s->cc_op != CC_OP_DYNAMIC) {
5323 gen_op_set_cc_op(s->cc_op);
5324 s->cc_op = CC_OP_DYNAMIC;
5325 }
5326 gen_jmp_im(pc_start - s->cs_base);
5327 gen_op_sysexit();
5328 gen_eob(s);
5329 }
5330 break;
5331 #ifdef TARGET_X86_64
5332 case 0x105: /* syscall */
5333 /* XXX: is it usable in real mode ? */
5334 if (s->cc_op != CC_OP_DYNAMIC) {
5335 gen_op_set_cc_op(s->cc_op);
5336 s->cc_op = CC_OP_DYNAMIC;
5337 }
5338 gen_jmp_im(pc_start - s->cs_base);
5339 gen_op_syscall(s->pc - pc_start);
5340 gen_eob(s);
5341 break;
5342 case 0x107: /* sysret */
5343 if (!s->pe) {
5344 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5345 } else {
5346 if (s->cc_op != CC_OP_DYNAMIC) {
5347 gen_op_set_cc_op(s->cc_op);
5348 s->cc_op = CC_OP_DYNAMIC;
5349 }
5350 gen_jmp_im(pc_start - s->cs_base);
5351 gen_op_sysret(s->dflag);
5352 gen_eob(s);
5353 }
5354 break;
5355 #endif
5356 case 0x1a2: /* cpuid */
5357 gen_op_cpuid();
5358 break;
5359 case 0xf4: /* hlt */
5360 if (s->cpl != 0) {
5361 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5362 } else {
5363 if (s->cc_op != CC_OP_DYNAMIC)
5364 gen_op_set_cc_op(s->cc_op);
5365 gen_jmp_im(s->pc - s->cs_base);
5366 gen_op_hlt();
5367 s->is_jmp = 3;
5368 }
5369 break;
5370 case 0x100:
5371 modrm = ldub_code(s->pc++);
5372 mod = (modrm >> 6) & 3;
5373 op = (modrm >> 3) & 7;
5374 switch(op) {
5375 case 0: /* sldt */
5376 if (!s->pe || s->vm86)
5377 goto illegal_op;
5378 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5379 ot = OT_WORD;
5380 if (mod == 3)
5381 ot += s->dflag;
5382 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5383 break;
5384 case 2: /* lldt */
5385 if (!s->pe || s->vm86)
5386 goto illegal_op;
5387 if (s->cpl != 0) {
5388 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5389 } else {
5390 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5391 gen_jmp_im(pc_start - s->cs_base);
5392 gen_op_lldt_T0();
5393 }
5394 break;
5395 case 1: /* str */
5396 if (!s->pe || s->vm86)
5397 goto illegal_op;
5398 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5399 ot = OT_WORD;
5400 if (mod == 3)
5401 ot += s->dflag;
5402 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5403 break;
5404 case 3: /* ltr */
5405 if (!s->pe || s->vm86)
5406 goto illegal_op;
5407 if (s->cpl != 0) {
5408 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5409 } else {
5410 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5411 gen_jmp_im(pc_start - s->cs_base);
5412 gen_op_ltr_T0();
5413 }
5414 break;
5415 case 4: /* verr */
5416 case 5: /* verw */
5417 if (!s->pe || s->vm86)
5418 goto illegal_op;
5419 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5420 if (s->cc_op != CC_OP_DYNAMIC)
5421 gen_op_set_cc_op(s->cc_op);
5422 if (op == 4)
5423 gen_op_verr();
5424 else
5425 gen_op_verw();
5426 s->cc_op = CC_OP_EFLAGS;
5427 break;
5428 default:
5429 goto illegal_op;
5430 }
5431 break;
5432 case 0x101:
5433 modrm = ldub_code(s->pc++);
5434 mod = (modrm >> 6) & 3;
5435 op = (modrm >> 3) & 7;
5436 switch(op) {
5437 case 0: /* sgdt */
5438 case 1: /* sidt */
5439 if (mod == 3)
5440 goto illegal_op;
5441 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5442 if (op == 0)
5443 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
5444 else
5445 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
5446 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5447 #ifdef TARGET_X86_64
5448 if (CODE64(s))
5449 gen_op_addq_A0_im(2);
5450 else
5451 #endif
5452 gen_op_addl_A0_im(2);
5453 if (op == 0)
5454 gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
5455 else
5456 gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
5457 if (!s->dflag)
5458 gen_op_andl_T0_im(0xffffff);
5459 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5460 break;
5461 case 2: /* lgdt */
5462 case 3: /* lidt */
5463 if (mod == 3)
5464 goto illegal_op;
5465 if (s->cpl != 0) {
5466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5467 } else {
5468 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5469 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5470 #ifdef TARGET_X86_64
5471 if (CODE64(s))
5472 gen_op_addq_A0_im(2);
5473 else
5474 #endif
5475 gen_op_addl_A0_im(2);
5476 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5477 if (!s->dflag)
5478 gen_op_andl_T0_im(0xffffff);
5479 if (op == 2) {
5480 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5481 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5482 } else {
5483 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5484 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5485 }
5486 }
5487 break;
5488 case 4: /* smsw */
5489 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5490 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5491 break;
5492 case 6: /* lmsw */
5493 if (s->cpl != 0) {
5494 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5495 } else {
5496 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5497 gen_op_lmsw_T0();
5498 gen_jmp_im(s->pc - s->cs_base);
5499 gen_eob(s);
5500 }
5501 break;
5502 case 7: /* invlpg */
5503 if (s->cpl != 0) {
5504 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5505 } else {
5506 if (mod == 3) {
5507 #ifdef TARGET_X86_64
5508 if (CODE64(s) && (modrm & 7) == 0) {
5509 /* swapgs */
5510 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5511 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5512 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5513 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5514 } else
5515 #endif
5516 {
5517 goto illegal_op;
5518 }
5519 } else {
5520 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5521 gen_op_invlpg_A0();
5522 gen_jmp_im(s->pc - s->cs_base);
5523 gen_eob(s);
5524 }
5525 }
5526 break;
5527 default:
5528 goto illegal_op;
5529 }
5530 break;
5531 case 0x108: /* invd */
5532 case 0x109: /* wbinvd */
5533 if (s->cpl != 0) {
5534 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5535 } else {
5536 /* nothing to do */
5537 }
5538 break;
5539 case 0x63: /* arpl or movslS (x86_64) */
5540 #ifdef TARGET_X86_64
5541 if (CODE64(s)) {
5542 int d_ot;
5543 /* d_ot is the size of destination */
5544 d_ot = dflag + OT_WORD;
5545
5546 modrm = ldub_code(s->pc++);
5547 reg = ((modrm >> 3) & 7) | rex_r;
5548 mod = (modrm >> 6) & 3;
5549 rm = (modrm & 7) | REX_B(s);
5550
5551 if (mod == 3) {
5552 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5553 /* sign extend */
5554 if (d_ot == OT_QUAD)
5555 gen_op_movslq_T0_T0();
5556 gen_op_mov_reg_T0[d_ot][reg]();
5557 } else {
5558 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5559 if (d_ot == OT_QUAD) {
5560 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5561 } else {
5562 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5563 }
5564 gen_op_mov_reg_T0[d_ot][reg]();
5565 }
5566 } else
5567 #endif
5568 {
5569 if (!s->pe || s->vm86)
5570 goto illegal_op;
5571 ot = dflag ? OT_LONG : OT_WORD;
5572 modrm = ldub_code(s->pc++);
5573 reg = (modrm >> 3) & 7;
5574 mod = (modrm >> 6) & 3;
5575 rm = modrm & 7;
5576 if (mod != 3) {
5577 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5578 gen_op_ld_T0_A0[ot + s->mem_index]();
5579 } else {
5580 gen_op_mov_TN_reg[ot][0][rm]();
5581 }
5582 if (s->cc_op != CC_OP_DYNAMIC)
5583 gen_op_set_cc_op(s->cc_op);
5584 gen_op_arpl();
5585 s->cc_op = CC_OP_EFLAGS;
5586 if (mod != 3) {
5587 gen_op_st_T0_A0[ot + s->mem_index]();
5588 } else {
5589 gen_op_mov_reg_T0[ot][rm]();
5590 }
5591 gen_op_arpl_update();
5592 }
5593 break;
5594 case 0x102: /* lar */
5595 case 0x103: /* lsl */
5596 if (!s->pe || s->vm86)
5597 goto illegal_op;
5598 ot = dflag ? OT_LONG : OT_WORD;
5599 modrm = ldub_code(s->pc++);
5600 reg = ((modrm >> 3) & 7) | rex_r;
5601 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5602 gen_op_mov_TN_reg[ot][1][reg]();
5603 if (s->cc_op != CC_OP_DYNAMIC)
5604 gen_op_set_cc_op(s->cc_op);
5605 if (b == 0x102)
5606 gen_op_lar();
5607 else
5608 gen_op_lsl();
5609 s->cc_op = CC_OP_EFLAGS;
5610 gen_op_mov_reg_T1[ot][reg]();
5611 break;
5612 case 0x118:
5613 modrm = ldub_code(s->pc++);
5614 mod = (modrm >> 6) & 3;
5615 op = (modrm >> 3) & 7;
5616 switch(op) {
5617 case 0: /* prefetchnta */
5618 case 1: /* prefetchnt0 */
5619 case 2: /* prefetchnt0 */
5620 case 3: /* prefetchnt0 */
5621 if (mod == 3)
5622 goto illegal_op;
5623 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5624 /* nothing more to do */
5625 break;
5626 default:
5627 goto illegal_op;
5628 }
5629 break;
5630 case 0x120: /* mov reg, crN */
5631 case 0x122: /* mov crN, reg */
5632 if (s->cpl != 0) {
5633 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5634 } else {
5635 modrm = ldub_code(s->pc++);
5636 if ((modrm & 0xc0) != 0xc0)
5637 goto illegal_op;
5638 rm = (modrm & 7) | REX_B(s);
5639 reg = ((modrm >> 3) & 7) | rex_r;
5640 if (CODE64(s))
5641 ot = OT_QUAD;
5642 else
5643 ot = OT_LONG;
5644 switch(reg) {
5645 case 0:
5646 case 2:
5647 case 3:
5648 case 4:
5649 case 8:
5650 if (b & 2) {
5651 gen_op_mov_TN_reg[ot][0][rm]();
5652 gen_op_movl_crN_T0(reg);
5653 gen_jmp_im(s->pc - s->cs_base);
5654 gen_eob(s);
5655 } else {
5656 #if !defined(CONFIG_USER_ONLY)
5657 if (reg == 8)
5658 gen_op_movtl_T0_cr8();
5659 else
5660 #endif
5661 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5662 gen_op_mov_reg_T0[ot][rm]();
5663 }
5664 break;
5665 default:
5666 goto illegal_op;
5667 }
5668 }
5669 break;
5670 case 0x121: /* mov reg, drN */
5671 case 0x123: /* mov drN, reg */
5672 if (s->cpl != 0) {
5673 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5674 } else {
5675 modrm = ldub_code(s->pc++);
5676 if ((modrm & 0xc0) != 0xc0)
5677 goto illegal_op;
5678 rm = (modrm & 7) | REX_B(s);
5679 reg = ((modrm >> 3) & 7) | rex_r;
5680 if (CODE64(s))
5681 ot = OT_QUAD;
5682 else
5683 ot = OT_LONG;
5684 /* XXX: do it dynamically with CR4.DE bit */
5685 if (reg == 4 || reg == 5 || reg >= 8)
5686 goto illegal_op;
5687 if (b & 2) {
5688 gen_op_mov_TN_reg[ot][0][rm]();
5689 gen_op_movl_drN_T0(reg);
5690 gen_jmp_im(s->pc - s->cs_base);
5691 gen_eob(s);
5692 } else {
5693 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5694 gen_op_mov_reg_T0[ot][rm]();
5695 }
5696 }
5697 break;
5698 case 0x106: /* clts */
5699 if (s->cpl != 0) {
5700 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5701 } else {
5702 gen_op_clts();
5703 /* abort block because static cpu state changed */
5704 gen_jmp_im(s->pc - s->cs_base);
5705 gen_eob(s);
5706 }
5707 break;
5708 /* MMX/SSE/SSE2/PNI support */
5709 case 0x1c3: /* MOVNTI reg, mem */
5710 if (!(s->cpuid_features & CPUID_SSE2))
5711 goto illegal_op;
5712 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5713 modrm = ldub_code(s->pc++);
5714 mod = (modrm >> 6) & 3;
5715 if (mod == 3)
5716 goto illegal_op;
5717 reg = ((modrm >> 3) & 7) | rex_r;
5718 /* generate a generic store */
5719 gen_ldst_modrm(s, modrm, ot, reg, 1);
5720 break;
5721 case 0x1ae:
5722 modrm = ldub_code(s->pc++);
5723 mod = (modrm >> 6) & 3;
5724 op = (modrm >> 3) & 7;
5725 switch(op) {
5726 case 0: /* fxsave */
5727 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5728 goto illegal_op;
5729 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5730 gen_op_fxsave_A0((s->dflag == 2));
5731 break;
5732 case 1: /* fxrstor */
5733 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5734 goto illegal_op;
5735 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5736 gen_op_fxrstor_A0((s->dflag == 2));
5737 break;
5738 case 2: /* ldmxcsr */
5739 case 3: /* stmxcsr */
5740 if (s->flags & HF_TS_MASK) {
5741 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5742 break;
5743 }
5744 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5745 mod == 3)
5746 goto illegal_op;
5747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5748 if (op == 2) {
5749 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5750 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5751 } else {
5752 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5753 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5754 }
5755 break;
5756 case 5: /* lfence */
5757 case 6: /* mfence */
5758 case 7: /* sfence */
5759 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5760 goto illegal_op;
5761 break;
5762 default:
5763 goto illegal_op;
5764 }
5765 break;
5766 case 0x110 ... 0x117:
5767 case 0x128 ... 0x12f:
5768 case 0x150 ... 0x177:
5769 case 0x17c ... 0x17f:
5770 case 0x1c2:
5771 case 0x1c4 ... 0x1c6:
5772 case 0x1d0 ... 0x1fe:
5773 gen_sse(s, b, pc_start, rex_r);
5774 break;
5775 default:
5776 goto illegal_op;
5777 }
5778 /* lock generation */
5779 if (s->prefix & PREFIX_LOCK)
5780 gen_op_unlock();
5781 return s->pc;
5782 illegal_op:
5783 if (s->prefix & PREFIX_LOCK)
5784 gen_op_unlock();
5785 /* XXX: ensure that no lock was generated */
5786 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
5787 return s->pc;
5788 }
5789
/* Convenience masks over the individual EFLAGS status-flag bits. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* Flags read by an operation: opc_read_flags[op] is the mask of EFLAGS
   condition-code bits (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C) that micro-op 'op'
   consumes on entry.  Unlisted ops default to 0 (read no flags).
   NOTE(review): presumably consulted by the flag liveness machinery
   alongside opc_write_flags -- the consumer is not visible in this chunk,
   confirm against the rest of the file. */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps after a sub: each variant reads only the bits
       its condition actually tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz test ZF in addition to the count register */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* pushf/lahf-style reads need the whole set */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr incorporate the incoming carry, so every memory
   variant (direct, _raw, _kernel, _user) reads CC_C on entry */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
5925
5926 /* flags written by an operation */
/* Flags written by an operation: opc_write_flags[op] is the mask of EFLAGS
   condition-code bits that micro-op 'op' (re)defines.  optimize_flags()
   uses this mask for its backward liveness scan: bits written here kill
   earlier definitions, and an op whose written bits are all dead can be
   replaced via opc_simpler[]. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* popf/sahf-style flag loads; the byte form cannot touch OF */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* entries shared by every memory-access variant (direct, _raw,
   _kernel, _user) of the read-modify-write ops */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6074
6075 /* simpler form of an operation if no flags need to be generated */
/* Simpler form of an operation when no flags need to be generated:
   opc_simpler[op] names a variant of 'op' that skips condition-code
   computation.  A zero entry means "no cheaper form"; optimize_flags_init()
   turns those zeros into the identity mapping so optimize_flags() can
   substitute *opc_ptr = opc_simpler[op] unconditionally when the written
   flags are dead. */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rol/ror have flag-free twins for every memory-access variant */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6118
6119 void optimize_flags_init(void)
6120 {
6121 int i;
6122 /* put default values in arrays */
6123 for(i = 0; i < NB_OPS; i++) {
6124 if (opc_simpler[i] == 0)
6125 opc_simpler[i] = i;
6126 }
6127 }
6128
6129 /* CPU flags computation optimization: we move backward thru the
6130 generated code to see which flags are needed. The operation is
6131 modified if suitable */
6132 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6133 {
6134 uint16_t *opc_ptr;
6135 int live_flags, write_flags, op;
6136
6137 opc_ptr = opc_buf + opc_buf_len;
6138 /* live_flags contains the flags needed by the next instructions
6139 in the code. At the end of the bloc, we consider that all the
6140 flags are live. */
6141 live_flags = CC_OSZAPC;
6142 while (opc_ptr > opc_buf) {
6143 op = *--opc_ptr;
6144 /* if none of the flags written by the instruction is used,
6145 then we can try to find a simpler instruction */
6146 write_flags = opc_write_flags[op];
6147 if ((live_flags & write_flags) == 0) {
6148 *opc_ptr = opc_simpler[op];
6149 }
6150 /* compute the live flags before the instruction */
6151 live_flags &= ~write_flags;
6152 live_flags |= opc_read_flags[op];
6153 }
6154 }
6155
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (guest PC, CC_OP and
   an instruction-start marker per micro-op slot), which is used to
   reconstruct CPU state at an arbitrary instruction boundary.
   Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the CPU mode bits cached in tb->flags into the
       per-block translation context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* flags state unknown at block entry: computed dynamically */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        /* index selects user vs. kernel softmmu access helpers */
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only safe when no per-insn event
       (trap flag, single-step, inhibited irq) must be observed */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record PC/CC_OP for the micro-op slot about to be
               emitted; zero-fill any slots skipped since last insn */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* when searching a PC, tb->size is already set by the initial
       translation pass; do not overwrite it */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6316
6317 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6318 {
6319 return gen_intermediate_code_internal(env, tb, 0);
6320 }
6321
6322 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6323 {
6324 return gen_intermediate_code_internal(env, tb, 1);
6325 }
6326