/*
 * Mirrored QEMU source: target-i386/translate.c
 * (snapshot around commit "Add TCG variable opaque type")
 */
1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
39
40 #ifdef TARGET_X86_64
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47 #if 1
48 #define BUGGY_64(x) NULL
49 #endif
50 #else
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
53 #define CODE64(s) 0
54 #define REX_X(s) 0
55 #define REX_B(s) 0
56 #endif
57
58 //#define MACRO_TEST 1
59
60 #ifdef TARGET_X86_64
61 #define TCG_TYPE_TL TCG_TYPE_I64
62 #define tcg_gen_movi_tl tcg_gen_movi_i64
63 #define tcg_gen_mov_tl tcg_gen_mov_i64
64 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
65 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
66 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
67 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
68 #define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
69 #define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
70 #define tcg_gen_ld_tl tcg_gen_ld_i64
71 #define tcg_gen_st8_tl tcg_gen_st8_i64
72 #define tcg_gen_st16_tl tcg_gen_st16_i64
73 #define tcg_gen_st32_tl tcg_gen_st32_i64
74 #define tcg_gen_st_tl tcg_gen_st_i64
75 #define tcg_gen_add_tl tcg_gen_add_i64
76 #define tcg_gen_addi_tl tcg_gen_addi_i64
77 #define tcg_gen_sub_tl tcg_gen_sub_i64
78 #define tcg_gen_subi_tl tcg_gen_subi_i64
79 #define tcg_gen_and_tl tcg_gen_and_i64
80 #define tcg_gen_andi_tl tcg_gen_andi_i64
81 #define tcg_gen_or_tl tcg_gen_or_i64
82 #define tcg_gen_ori_tl tcg_gen_ori_i64
83 #define tcg_gen_xor_tl tcg_gen_xor_i64
84 #define tcg_gen_xori_tl tcg_gen_xori_i64
85 #define tcg_gen_shl_tl tcg_gen_shl_i64
86 #define tcg_gen_shli_tl tcg_gen_shli_i64
87 #define tcg_gen_shr_tl tcg_gen_shr_i64
88 #define tcg_gen_shri_tl tcg_gen_shri_i64
89 #define tcg_gen_sar_tl tcg_gen_sar_i64
90 #define tcg_gen_sari_tl tcg_gen_sari_i64
91 #else
92 #define TCG_TYPE_TL TCG_TYPE_I32
93 #define tcg_gen_movi_tl tcg_gen_movi_i32
94 #define tcg_gen_mov_tl tcg_gen_mov_i32
95 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
96 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
97 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
98 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
99 #define tcg_gen_ld32u_tl tcg_gen_ld_i32
100 #define tcg_gen_ld32s_tl tcg_gen_ld_i32
101 #define tcg_gen_ld_tl tcg_gen_ld_i32
102 #define tcg_gen_st8_tl tcg_gen_st8_i32
103 #define tcg_gen_st16_tl tcg_gen_st16_i32
104 #define tcg_gen_st32_tl tcg_gen_st_i32
105 #define tcg_gen_st_tl tcg_gen_st_i32
106 #define tcg_gen_add_tl tcg_gen_add_i32
107 #define tcg_gen_addi_tl tcg_gen_addi_i32
108 #define tcg_gen_sub_tl tcg_gen_sub_i32
109 #define tcg_gen_subi_tl tcg_gen_subi_i32
110 #define tcg_gen_and_tl tcg_gen_and_i32
111 #define tcg_gen_andi_tl tcg_gen_andi_i32
112 #define tcg_gen_or_tl tcg_gen_or_i32
113 #define tcg_gen_ori_tl tcg_gen_ori_i32
114 #define tcg_gen_xor_tl tcg_gen_xor_i32
115 #define tcg_gen_xori_tl tcg_gen_xori_i32
116 #define tcg_gen_shl_tl tcg_gen_shl_i32
117 #define tcg_gen_shli_tl tcg_gen_shli_i32
118 #define tcg_gen_shr_tl tcg_gen_shr_i32
119 #define tcg_gen_shri_tl tcg_gen_shri_i32
120 #define tcg_gen_sar_tl tcg_gen_sar_i32
121 #define tcg_gen_sari_tl tcg_gen_sari_i32
122 #endif
123
124 /* global register indexes */
125 static TCGv cpu_env, cpu_T[2], cpu_A0;
126 /* local register indexes (only used inside old micro ops) */
127 static TCGv cpu_tmp0;
128
129 #ifdef TARGET_X86_64
130 static int x86_64_hregs;
131 #endif
132
/* Decoder/translator state for one translation block.  The "current
   insn context" fields change per instruction; the "current block
   context" fields are fixed for the whole block being translated. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;   /* address size / operand size flags */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;   /* index/base extension bits from the REX prefix */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
} DisasContext;
168
169 static void gen_eob(DisasContext *s);
170 static void gen_jmp(DisasContext *s, target_ulong eip);
171 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
172
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    /* pseudo-registers used by the old micro-op interface */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
220
/* Trivial micro-op wrappers: load immediates into the T0/T1 value
   temporaries and the A0 address temporary, and copy/mask them.  They
   keep the translator reading like the old gen_op_* interface. */

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* 32-bit immediate (sign-extended to target width). */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* 32-bit immediate (zero-extended to target width). */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* Full target-width immediate loads. */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* Keep only the low 16 bits of T0. */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* Keep only the low 16 bits of A0 (16-bit address wrap). */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
287
288 #ifdef TARGET_X86_64
289
290 #define NB_OP_SIZES 4
291
292 #define DEF_REGS(prefix, suffix) \
293 prefix ## EAX ## suffix,\
294 prefix ## ECX ## suffix,\
295 prefix ## EDX ## suffix,\
296 prefix ## EBX ## suffix,\
297 prefix ## ESP ## suffix,\
298 prefix ## EBP ## suffix,\
299 prefix ## ESI ## suffix,\
300 prefix ## EDI ## suffix,\
301 prefix ## R8 ## suffix,\
302 prefix ## R9 ## suffix,\
303 prefix ## R10 ## suffix,\
304 prefix ## R11 ## suffix,\
305 prefix ## R12 ## suffix,\
306 prefix ## R13 ## suffix,\
307 prefix ## R14 ## suffix,\
308 prefix ## R15 ## suffix,
309
310 #else /* !TARGET_X86_64 */
311
312 #define NB_OP_SIZES 3
313
314 #define DEF_REGS(prefix, suffix) \
315 prefix ## EAX ## suffix,\
316 prefix ## ECX ## suffix,\
317 prefix ## EDX ## suffix,\
318 prefix ## EBX ## suffix,\
319 prefix ## ESP ## suffix,\
320 prefix ## EBP ## suffix,\
321 prefix ## ESI ## suffix,\
322 prefix ## EDI ## suffix,
323
324 #endif /* !TARGET_X86_64 */
325
326 #if defined(WORDS_BIGENDIAN)
327 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
328 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
329 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
330 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
331 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
332 #else
333 #define REG_B_OFFSET 0
334 #define REG_H_OFFSET 1
335 #define REG_W_OFFSET 0
336 #define REG_L_OFFSET 0
337 #define REG_LH_OFFSET 4
338 #endif
339
/* Store cpu_T[t_index] into x86 register 'reg' with operand size 'ot'.
   OT_BYTE distinguishes low-byte registers from the legacy AH/CH/DH/BH
   encodings; on x86-64, a 32-bit store also clears the high 32 bits of
   the register. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* low byte: AL/CL/DL/BL, any reg >= 8, or any register
               when x86_64_hregs is set (NOTE(review): presumably set
               while a REX prefix is active -- confirm in the decoder) */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* AH/CH/DH/BH: byte 1 of regs[reg - 4] */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
372
/* Convenience wrappers: store T0 / T1 into register 'reg'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
382
/* Store the A0 temporary into regs[reg].  size: 0 = 16-bit store,
   1 = 32-bit (high half cleared on x86-64), 2 = 64-bit. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
408
/* Load regs[reg] into cpu_T[t_index].  Only OT_BYTE with a legacy high
   byte register (AH/CH/DH/BH) is special-cased; every other case loads
   the full register word. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH/CH/DH/BH: byte 1 of regs[reg - 4] */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
425
/* A0 = low 32 bits of regs[reg], zero-extended. */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val; on x86-64 the result is truncated to 32 bits (32-bit
   address-size wraparound). */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val, full 64-bit arithmetic. */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* Add an immediate to A0 using the current code-size semantics. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
465
/* ESP += val, writing back only the low 16 bits (16-bit stacks). */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

/* ESP += val; on x86-64 the result is truncated to 32 bits. */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
/* RSP += val, full width. */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

/* env->cc_op = val (stored as a 32-bit field). */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
497
/* A0 += regs[reg] << shift; on x86-64 the sum is truncated to 32 bits
   (32-bit address size). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

/* A0 = low 32 bits of segs[reg].base. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segs[reg].base; on x86-64 the sum is truncated to 32 bits. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
522
#ifdef TARGET_X86_64
/* 64-bit flavours of the A0 address helpers: full-width arithmetic,
   no 32-bit masking. */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += regs[reg] << shift */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
548
/* cmov micro-ops, indexed [operand size - OT_WORD][register].
   There is no byte row: cmov has no 8-bit form. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
562
/* adc/sbb micro-ops, indexed [operand size][0 = adc, 1 = sbb].  The
   _mem table folds the memory-access kind (raw/kernel/user) into its
   first dimension. */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* CC_OP_* flag-computation class selected by each OP_* arith op
   (byte-sized entries; indexed OP_ADDL..OP_CMPL). */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
603
/* cmpxchg micro-ops indexed by operand size; the _mem table adds the
   access-kind dimension (raw/kernel/user). */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
621
/* Shift/rotate micro-ops, indexed [operand size][OP_ROL..OP_SAR].
   Entry 6 (OP_SHL1, the undocumented encoding) reuses the shl op. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
675
/* Double-width shift (shld/shrd) micro-ops, indexed [operand size]
   [0 = shld, 1 = shrd].  The OT_BYTE row is NULL: there is no byte
   form.  'op' selects the count source (immediate or ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
717
/* bt/bts/btr/btc micro-ops, indexed [operand size - OT_WORD][op].
   No byte row: the bit-test instructions have no 8-bit form. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* adjust A0 by the bit-offset in T1 for memory forms of bt* */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* bsf/bsr micro-ops, indexed [operand size - OT_WORD][0 = bsf, 1 = bsr] */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
763
764 static inline void gen_op_lds_T0_A0(int idx)
765 {
766 int mem_index = (idx >> 2) - 1;
767 switch(idx & 3) {
768 case 0:
769 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
770 break;
771 case 1:
772 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
773 break;
774 default:
775 case 2:
776 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
777 break;
778 }
779 }
780
781 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
782 static inline void gen_op_ld_T0_A0(int idx)
783 {
784 int mem_index = (idx >> 2) - 1;
785 switch(idx & 3) {
786 case 0:
787 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
788 break;
789 case 1:
790 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
791 break;
792 case 2:
793 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
794 break;
795 default:
796 case 3:
797 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
798 break;
799 }
800 }
801
/* Unsigned load alias: gen_op_ld_T0_A0 already zero-extends. */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
806
807 static inline void gen_op_ld_T1_A0(int idx)
808 {
809 int mem_index = (idx >> 2) - 1;
810 switch(idx & 3) {
811 case 0:
812 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
813 break;
814 case 1:
815 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
816 break;
817 case 2:
818 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
819 break;
820 default:
821 case 3:
822 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
823 break;
824 }
825 }
826
827 static inline void gen_op_st_T0_A0(int idx)
828 {
829 int mem_index = (idx >> 2) - 1;
830 switch(idx & 3) {
831 case 0:
832 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
833 break;
834 case 1:
835 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
836 break;
837 case 2:
838 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
839 break;
840 default:
841 case 3:
842 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
843 break;
844 }
845 }
846
847 static inline void gen_op_st_T1_A0(int idx)
848 {
849 int mem_index = (idx >> 2) - 1;
850 switch(idx & 3) {
851 case 0:
852 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
853 break;
854 case 1:
855 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
856 break;
857 case 2:
858 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
859 break;
860 default:
861 case 3:
862 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
863 break;
864 }
865 }
866
/* env->eip = pc (known at translation time). */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
872
/* A0 = effective source address for string insns: ESI plus the segment
   base (DS unless an override prefix is in effect), for the current
   address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only added on explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
907
/* A0 = effective destination address for string insns: ES:EDI (no
   segment override is applied to the destination). */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
928
/* Dispatch tables for the string and I/O helpers below, indexed by
   operand size (b/w/l, plus q where defined). */

/* T0 = per-element stride used to advance ESI/EDI (the string helpers
   below add T0 to ESI/EDI each iteration; presumably the Dshift ops
   factor in EFLAGS.DF -- confirm in the op definitions). */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* branch to label if CX/ECX/RCX != 0; indexed by address size (aflag) */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

/* branch to label if CX/ECX/RCX == 0; indexed by address size */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* decrement CX/ECX/RCX; indexed by address size */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* loop-exit tests for repz/repnz scas/cmps:
   [0] = jump if last sub result non-zero (repz exit on mismatch),
   [1] = jump if last sub result zero (repnz exit on match) */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* port input into T0, port number taken from DX */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

/* port output of T0, port number taken from DX */
static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

/* port input: port in T0, result in T1 */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

/* port output: port in T0, value in T1 */
static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* run-time I/O permission check, port in T0 */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

/* run-time I/O permission check, port in DX */
static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
1004
1005 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1006 {
1007 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1008 if (s->cc_op != CC_OP_DYNAMIC)
1009 gen_op_set_cc_op(s->cc_op);
1010 gen_jmp_im(cur_eip);
1011 if (use_dx)
1012 gen_check_io_DX[ot]();
1013 else
1014 gen_check_io_T0[ot]();
1015 }
1016 }
1017
/* One MOVS iteration: copy one element from seg:ESI to ES:EDI, then
   advance ESI and EDI by the stride Dshift put in T0, using the
   current address size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1039
1040 static inline void gen_update_cc_op(DisasContext *s)
1041 {
1042 if (s->cc_op != CC_OP_DYNAMIC) {
1043 gen_op_set_cc_op(s->cc_op);
1044 s->cc_op = CC_OP_DYNAMIC;
1045 }
1046 }
1047
1048 /* XXX: does not work with gdbstub "ice" single step - not a
1049 serious problem */
1050 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1051 {
1052 int l1, l2;
1053
1054 l1 = gen_new_label();
1055 l2 = gen_new_label();
1056 gen_op_jnz_ecx[s->aflag](l1);
1057 gen_set_label(l2);
1058 gen_jmp_tb(s, next_eip, 1);
1059 gen_set_label(l1);
1060 return l2;
1061 }
1062
/* One STOS iteration: store EAX (full register loaded into T0) at
   ES:EDI, then advance EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1080
/* One LODS iteration: load from seg:ESI into EAX, then advance ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1098
/* One SCAS iteration: compare EAX (in T0) with the element at ES:EDI
   (loaded into T1), setting the CC state, then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1117
/* One CMPS iteration: compare the element at seg:ESI (T0) with the one
   at ES:EDI (T1), setting the CC state, then advance ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1140
/* One INS iteration: read from port DX, store at ES:EDI, advance EDI.
   A dummy store of 0 is emitted before the port read -- presumably so
   a write fault on EDI is taken before the device is touched
   (NOTE(review): confirm intent). */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1160
/* One OUTS iteration: load the element at seg:ESI and write it to
   port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1178
1179 /* same method as Valgrind : we generate jumps to current or next
1180 instruction */
1181 #define GEN_REPZ(op) \
1182 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1183 target_ulong cur_eip, target_ulong next_eip) \
1184 { \
1185 int l2;\
1186 gen_update_cc_op(s); \
1187 l2 = gen_jz_ecx_string(s, next_eip); \
1188 gen_ ## op(s, ot); \
1189 gen_op_dec_ECX[s->aflag](); \
1190 /* a loop would cause two single step exceptions if ECX = 1 \
1191 before rep string_insn */ \
1192 if (!s->jmp_opt) \
1193 gen_op_jz_ecx[s->aflag](l2); \
1194 gen_jmp(s, cur_eip); \
1195 }
1196
/* Like GEN_REPZ, but for the comparing string ops (SCAS/CMPS) that take
   a REPZ/REPNZ prefix: after the element op, flags are set from the SUB
   result and the loop is exited early when ZF mismatches 'nz'. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1214
/* Instantiate the REP-prefixed variants of the string instructions. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
/* SCAS/CMPS additionally honour the REPZ/REPNZ termination condition. */
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1222
/* Condition-code kinds, in x86 Jcc opcode order (bits 3:1 of the
   opcode).  The low opcode bit selects the inverted form and is handled
   separately (see 'inv' in gen_jcc/gen_setcc). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1233
/* Fast conditional-jump generators usable when flags come from a SUB of
   the given operand size ([OT_BYTE..OT_QUAD][JCC_*]).  NULL entries
   (JCC_O, JCC_P) have no cheap form and fall back to the slow path. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        /* BUGGY_64 expands to NULL while the host-codegen workaround is
           active (see the #define near the top of the file) */
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* Generators for LOOPNZ/LOOPZ/JECXZ-style ops, indexed by address size
   (0=16-bit, 1=32-bit, 2=64-bit) then by loop variant. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1297
/* Slow-path SETcc generators, one per JCC_* kind: compute the condition
   into T0 from the full (dynamically evaluated) flags state. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1308
/* Fast SETcc generators usable when flags come from a SUB of the given
   operand size; NULL entries (JCC_O, JCC_P) use gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1353
/* x87 arithmetic dispatch, indexed by the 3-bit /reg field of the FPU
   opcodes (add, mul, com, comp, sub, subr, div, divr). */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    /* fcom and fcomp share a generator; the pop is emitted separately */
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    /* sub/subr (and div/divr) are swapped relative to the ST0 table:
       in the STN,ST0 encodings the operand roles are reversed */
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1376
/* Generate one integer ALU operation 'op' of size 'ot' between T0 (the
   destination operand) and T1.  if d == OR_TMP0, it means memory
   operand (address in A0); otherwise d is the destination register.
   Updates s1->cc_op to reflect how flags can be recomputed lazily. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the carry flag, so the lazily-tracked flags
           must be materialized first; afterwards the flag state can no
           longer be described by a single cc_op, hence CC_OP_DYNAMIC */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            /* read-modify-write form: the generated op writes memory
               itself, keeping load+store atomic wrt exceptions */
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        /* CMP discards the result; nothing to write back */
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1444
1445 /* if d == OR_TMP0, it means memory operand (address in A0) */
1446 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1447 {
1448 if (d != OR_TMP0)
1449 gen_op_mov_TN_reg(ot, 0, d);
1450 else
1451 gen_op_ld_T0_A0(ot + s1->mem_index);
1452 if (s1->cc_op != CC_OP_DYNAMIC)
1453 gen_op_set_cc_op(s1->cc_op);
1454 if (c > 0) {
1455 gen_op_incl_T0();
1456 s1->cc_op = CC_OP_INCB + ot;
1457 } else {
1458 gen_op_decl_T0();
1459 s1->cc_op = CC_OP_DECB + ot;
1460 }
1461 if (d != OR_TMP0)
1462 gen_op_mov_reg_T0(ot, d);
1463 else
1464 gen_op_st_T0_A0(ot + s1->mem_index);
1465 gen_op_update_inc_cc();
1466 }
1467
/* Generate shift/rotate 'op' of size 'ot'.  Destination is register d
   or memory at A0 (d == OR_TMP0); the count comes from register s or is
   already in T1 (s == OR_TMP1, see gen_shifti). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        /* memory form stores the result itself */
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1488
/* Shift/rotate by an immediate count 'c': load the count into T1 and
   reuse the generic gen_shift path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1495
/* Decode the ModR/M (and optional SIB + displacement) bytes of a memory
   operand and generate code that leaves the effective address in A0,
   including the segment base when required.  Consumes the extra operand
   bytes from the instruction stream (advances s->pc).  On return
   *reg_ptr is always OR_A0 and *offset_ptr is 0 (historic interface). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment override prefix forces the segment-base addition even
       when addseg optimization would normally skip it */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base encoding 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: 32-bit displacement only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing: fixed register pairs per rm encoding */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* rm 6 with mod 0 is a bare 16-bit displacement */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit addresses wrap within the segment */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes (rm 2, 3, 6) default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1679
1680 static void gen_nop_modrm(DisasContext *s, int modrm)
1681 {
1682 int mod, rm, base, code;
1683
1684 mod = (modrm >> 6) & 3;
1685 if (mod == 3)
1686 return;
1687 rm = modrm & 7;
1688
1689 if (s->aflag) {
1690
1691 base = rm;
1692
1693 if (base == 4) {
1694 code = ldub_code(s->pc++);
1695 base = (code & 7);
1696 }
1697
1698 switch (mod) {
1699 case 0:
1700 if (base == 5) {
1701 s->pc += 4;
1702 }
1703 break;
1704 case 1:
1705 s->pc++;
1706 break;
1707 default:
1708 case 2:
1709 s->pc += 4;
1710 break;
1711 }
1712 } else {
1713 switch (mod) {
1714 case 0:
1715 if (rm == 6) {
1716 s->pc += 2;
1717 }
1718 break;
1719 case 1:
1720 s->pc++;
1721 break;
1722 default:
1723 case 2:
1724 s->pc += 2;
1725 break;
1726 }
1727 }
1728 }
1729
1730 /* used for LEA and MOV AX, mem */
1731 static void gen_add_A0_ds_seg(DisasContext *s)
1732 {
1733 int override, must_add_seg;
1734 must_add_seg = s->addseg;
1735 override = R_DS;
1736 if (s->override >= 0) {
1737 override = s->override;
1738 must_add_seg = 1;
1739 } else {
1740 override = R_DS;
1741 }
1742 if (must_add_seg) {
1743 #ifdef TARGET_X86_64
1744 if (CODE64(s)) {
1745 gen_op_addq_A0_seg(override);
1746 } else
1747 #endif
1748 {
1749 gen_op_addl_A0_seg(override);
1750 }
1751 }
1752 }
1753
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is a register and a register-to-register
   move is emitted instead; otherwise the effective address is computed
   via gen_lea_modrm and the value is loaded from / stored to memory. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address ends up in A0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1785
1786 static inline uint32_t insn_get(DisasContext *s, int ot)
1787 {
1788 uint32_t ret;
1789
1790 switch(ot) {
1791 case OT_BYTE:
1792 ret = ldub_code(s->pc);
1793 s->pc++;
1794 break;
1795 case OT_WORD:
1796 ret = lduw_code(s->pc);
1797 s->pc += 2;
1798 break;
1799 default:
1800 case OT_LONG:
1801 ret = ldl_code(s->pc);
1802 s->pc += 4;
1803 break;
1804 }
1805 return ret;
1806 }
1807
1808 static inline int insn_const_size(unsigned int ot)
1809 {
1810 if (ot <= OT_LONG)
1811 return 1 << ot;
1812 else
1813 return 4;
1814 }
1815
/* Emit a jump to 'eip', chaining directly to the next TB when the
   target lies in a page this TB already covers (safe wrt invalidation),
   otherwise ending the block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        /* encode (tb, slot) in the return value for TB chaining */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1836
1837 static inline void gen_jcc(DisasContext *s, int b,
1838 target_ulong val, target_ulong next_eip)
1839 {
1840 TranslationBlock *tb;
1841 int inv, jcc_op;
1842 GenOpFunc1 *func;
1843 target_ulong tmp;
1844 int l1, l2;
1845
1846 inv = b & 1;
1847 jcc_op = (b >> 1) & 7;
1848
1849 if (s->jmp_opt) {
1850 switch(s->cc_op) {
1851 /* we optimize the cmp/jcc case */
1852 case CC_OP_SUBB:
1853 case CC_OP_SUBW:
1854 case CC_OP_SUBL:
1855 case CC_OP_SUBQ:
1856 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1857 break;
1858
1859 /* some jumps are easy to compute */
1860 case CC_OP_ADDB:
1861 case CC_OP_ADDW:
1862 case CC_OP_ADDL:
1863 case CC_OP_ADDQ:
1864
1865 case CC_OP_ADCB:
1866 case CC_OP_ADCW:
1867 case CC_OP_ADCL:
1868 case CC_OP_ADCQ:
1869
1870 case CC_OP_SBBB:
1871 case CC_OP_SBBW:
1872 case CC_OP_SBBL:
1873 case CC_OP_SBBQ:
1874
1875 case CC_OP_LOGICB:
1876 case CC_OP_LOGICW:
1877 case CC_OP_LOGICL:
1878 case CC_OP_LOGICQ:
1879
1880 case CC_OP_INCB:
1881 case CC_OP_INCW:
1882 case CC_OP_INCL:
1883 case CC_OP_INCQ:
1884
1885 case CC_OP_DECB:
1886 case CC_OP_DECW:
1887 case CC_OP_DECL:
1888 case CC_OP_DECQ:
1889
1890 case CC_OP_SHLB:
1891 case CC_OP_SHLW:
1892 case CC_OP_SHLL:
1893 case CC_OP_SHLQ:
1894
1895 case CC_OP_SARB:
1896 case CC_OP_SARW:
1897 case CC_OP_SARL:
1898 case CC_OP_SARQ:
1899 switch(jcc_op) {
1900 case JCC_Z:
1901 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1902 break;
1903 case JCC_S:
1904 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1905 break;
1906 default:
1907 func = NULL;
1908 break;
1909 }
1910 break;
1911 default:
1912 func = NULL;
1913 break;
1914 }
1915
1916 if (s->cc_op != CC_OP_DYNAMIC) {
1917 gen_op_set_cc_op(s->cc_op);
1918 s->cc_op = CC_OP_DYNAMIC;
1919 }
1920
1921 if (!func) {
1922 gen_setcc_slow[jcc_op]();
1923 func = gen_op_jnz_T0_label;
1924 }
1925
1926 if (inv) {
1927 tmp = val;
1928 val = next_eip;
1929 next_eip = tmp;
1930 }
1931 tb = s->tb;
1932
1933 l1 = gen_new_label();
1934 func(l1);
1935
1936 gen_goto_tb(s, 0, next_eip);
1937
1938 gen_set_label(l1);
1939 gen_goto_tb(s, 1, val);
1940
1941 s->is_jmp = 3;
1942 } else {
1943
1944 if (s->cc_op != CC_OP_DYNAMIC) {
1945 gen_op_set_cc_op(s->cc_op);
1946 s->cc_op = CC_OP_DYNAMIC;
1947 }
1948 gen_setcc_slow[jcc_op]();
1949 if (inv) {
1950 tmp = val;
1951 val = next_eip;
1952 next_eip = tmp;
1953 }
1954 l1 = gen_new_label();
1955 l2 = gen_new_label();
1956 gen_op_jnz_T0_label(l1);
1957 gen_jmp_im(next_eip);
1958 gen_op_jmp_label(l2);
1959 gen_set_label(l1);
1960 gen_jmp_im(val);
1961 gen_set_label(l2);
1962 gen_eob(s);
1963 }
1964 }
1965
/* Generate SETcc for condition 'b': leave 0/1 in T0.  Uses the fast
   per-cc_op table when the condition can be derived cheaply, otherwise
   falls back to the slow full-flags evaluation.  Odd 'b' is the
   inverted condition, applied by XOR-ing the result with 1. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            /* ZF/SF depend only on the result, so the SUB-flavoured
               setter of matching size applies to all ops above */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize lazy flags, then evaluate the condition fully */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2032
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must be
           up to date before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector load, cannot fault */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2055
/* Load a 64-bit SVM exit-info value into T1 (alias kept for the SVM
   helpers below). */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
2057
/* Emit an SVM IOIO-intercept check before an I/O instruction, when the
   IOIO-protection intercept is enabled in the guest's VMCB flags.
   'type' is the 64-bit exit-info word describing the access.  Always
   returns 0 (the intercept, if it fires, exits at run time). */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* T1 := next EIP, so the vmexit frame can resume after the insn */
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
           so we know if this is an EOB or not ... let's assume it's not
           for now. */
    }
#endif
    return 0;
}
2077
2078 static inline int svm_is_rep(int prefixes)
2079 {
2080 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2081 }
2082
/* Emit an SVM intercept check of kind 'type' with exit-info 'param'.
   Returns 1 when the check unconditionally ends the TB (vmexit path),
   0 otherwise.  No-op when SVM intercepts are not active. */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
	/* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            /* flags could change due to the helper call */
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        SVM_movq_T1_im(param);
        gen_op_geneflags();
        gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        return 0;
    case SVM_EXIT_MSR:
        if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        }
        break;
    default:
        if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                /* NOTE(review): this branch sets CC_OP_EFLAGS while the
                   two cases above set CC_OP_DYNAMIC — looks inconsistent;
                   confirm whether that is intentional */
                s->cc_op = CC_OP_EFLAGS;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_vmexit(type >> 32, type);
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */
            gen_eob(s);
            return 1;
        }
    }
    return 0;
}
2137
/* Convenience wrapper: SVM intercept check with a zero exit-info
   parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2143
2144 static inline void gen_stack_update(DisasContext *s, int addend)
2145 {
2146 #ifdef TARGET_X86_64
2147 if (CODE64(s)) {
2148 gen_op_addq_ESP_im(addend);
2149 } else
2150 #endif
2151 if (s->ss32) {
2152 gen_op_addl_ESP_im(addend);
2153 } else {
2154 gen_op_addw_ESP_im(addend);
2155 }
2156 }
2157
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes the value in T0: decrement a copy of ESP into A0, store T0 at
   SS:[A0], then commit the new stack pointer.  ESP itself is only
   written after the store so a faulting push is restartable. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand push */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit operand push (66h prefix in long mode) */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the raw (segment-less) ESP value in T1 */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            /* A0 still equals the new ESP when no base was added */
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2197
2198 /* generate a push. It depends on ss32, addseg and dflag */
2199 /* slower version for T1, only used for call Ev */
2200 static void gen_push_T1(DisasContext *s)
2201 {
2202 #ifdef TARGET_X86_64
2203 if (CODE64(s)) {
2204 gen_op_movq_A0_reg(R_ESP);
2205 if (s->dflag) {
2206 gen_op_addq_A0_im(-8);
2207 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2208 } else {
2209 gen_op_addq_A0_im(-2);
2210 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2211 }
2212 gen_op_mov_reg_A0(2, R_ESP);
2213 } else
2214 #endif
2215 {
2216 gen_op_movl_A0_reg(R_ESP);
2217 if (!s->dflag)
2218 gen_op_addl_A0_im(-2);
2219 else
2220 gen_op_addl_A0_im(-4);
2221 if (s->ss32) {
2222 if (s->addseg) {
2223 gen_op_addl_A0_seg(R_SS);
2224 }
2225 } else {
2226 gen_op_andl_A0_ffff();
2227 gen_op_addl_A0_seg(R_SS);
2228 }
2229 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2230
2231 if (s->ss32 && !s->addseg)
2232 gen_op_mov_reg_A0(1, R_ESP);
2233 else
2234 gen_stack_update(s, (-2) << s->dflag);
2235 }
2236 }
2237
/* two step pop is necessary for precise exceptions */
/* Loads the stack top into T0 without modifying ESP; the caller emits
   gen_pop_update afterwards, so a faulting load leaves ESP intact. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16-bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2259
2260 static void gen_pop_update(DisasContext *s)
2261 {
2262 #ifdef TARGET_X86_64
2263 if (CODE64(s) && s->dflag) {
2264 gen_stack_update(s, 8);
2265 } else
2266 #endif
2267 {
2268 gen_stack_update(s, 2 << s->dflag);
2269 }
2270 }
2271
/* Put the linear address of the stack top into A0, keeping the raw
   (segment-less) offset in T1 for a later ESP write-back. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* snapshot the offset before the segment base is added */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2281
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store the eight GPRs (EAX..EDI order, i.e. register
   7-i counting down) below the old stack pointer, then commit the
   pre-decremented ESP kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    /* reserve space for 8 entries of 2 or 4 bytes */
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2300
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload the GPRs from the stack (skipping the stored ESP
   slot), then commit the post-incremented stack pointer from T1. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* final ESP = old ESP + 8 entries of 2 or 4 bytes */
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2322
/* Generate ENTER: push EBP, optionally copy 'level' (masked to 5 bits,
   per the ISA) frame pointers via the enter_level helper, set EBP to
   the new frame base and reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* hardware masks the nesting level to 0..31 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        /* T1 := new frame base, used for EBP and final ESP below */
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        /* T1 := new frame base (segment-less offset) */
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2370
/* Raise exception 'trapno' at cur_eip: flags and EIP are synchronized
   first so the exception frame sees precise CPU state; ends the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2379
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno' (INT n): the instruction length
   (next_eip - cur_eip) is passed so the helper can push the correct
   return address; ends the TB. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2391
/* Stop execution at cur_eip and signal the debugger (breakpoint hit);
   flags and EIP are synchronized first; ends the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2400
/* generate a generic end of block. Trace exception is also generated
   if needed */
/* Flushes lazy flags, clears a pending interrupt-inhibit (POP SS /
   MOV SS window), then exits via debugger trap, single-step trap, or a
   plain un-chained TB exit. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* EFLAGS.TF single-step trap */
        gen_op_single_step();
    } else {
        /* return 0: no TB chaining from here */
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2419
2420 /* generate a jump to eip. No segment change must happen before as a
2421 direct call to the next block may occur */
2422 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2423 {
2424 if (s->jmp_opt) {
2425 if (s->cc_op != CC_OP_DYNAMIC) {
2426 gen_op_set_cc_op(s->cc_op);
2427 s->cc_op = CC_OP_DYNAMIC;
2428 }
2429 gen_goto_tb(s, tb_num, eip);
2430 s->is_jmp = 3;
2431 } else {
2432 gen_jmp_im(eip);
2433 gen_eob(s);
2434 }
2435 }
2436
/* Unconditional jump to eip using chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2441
/* 64-bit ('q') and 128-bit ('o') load/store generators between memory
   at A0 and a CPUX86State field given by offset.  The three entries are
   the raw/kernel/user MMU variants — presumably selected with
   s->mem_index like the other memory ops; user-only builds have just
   the raw variant. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2473
/* Sentinel (not a callable pointer) marking SSE opcodes that need
   ad-hoc decoding instead of simple table dispatch. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Handler pair: MMX (64-bit) and SSE (128-bit) forms of the same op. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Handler quadruple: ps, pd, ss, sd variants (selected by prefix). */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2479
/* Dispatch table for the two-byte (0x0f xx) SSE/MMX opcode space.
   First index is the second opcode byte; second index selects the
   mandatory-prefix variant as computed in gen_sse():
     [0] = no prefix (MMX / packed-single "ps"),
     [1] = 0x66      (packed-double "pd"),
     [2] = 0xf3      (scalar-single "ss"),
     [3] = 0xf2      (scalar-double "sd").
   SSE_SPECIAL marks opcodes that are decoded by hand in gen_sse();
   a NULL (absent) entry means the combination is an illegal opcode. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    /* andps/andpd etc. reuse the MMX-style bitwise xmm helpers */
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* placeholder: the real comparison op is picked from
       sse_op_table4 using the immediate byte, see gen_sse() */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    /* pshuf* take an immediate, so they are really GenOpFunc3 --
       gen_sse() casts back before calling, see the 0x70 case there */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2602
/* Shift-by-immediate forms (opcodes 0x71/0x72/0x73 with the op selected
   by the modrm reg field).  Row index is built in gen_sse() as
   ((b - 1) & 3) * 8 + reg, i.e. 0/8/16 for word/dword/qword shifts plus
   the 3-bit reg field; column [0] is MMX, [1] is SSE (0x66 prefix).
   psrldq/pslldq only exist in the SSE column; absent rows are illegal. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2615
/* Scalar integer <-> float conversions.  Three groups of four entries:
   cvtsi2ss/sd (0x2a), cvttss/sd2si (0x2c), cvtss/sd2si (0x2d).
   Within each group the layout is { 32-bit ss, 32-bit sd,
   64-bit ss, 64-bit sd }; gen_sse() indexes it with
   (dflag == 2) * 2 + (prefix selector) [+ group offset].  The order of
   these positional initializers therefore must not change.  The 64-bit
   variants are NULL when not built for x86-64. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2632
/* Compare operations for opcode 0xc2 (cmpps/cmppd/cmpss/cmpsd).
   First index is the 3-bit immediate predicate (eq, lt, le, unord,
   neq, nlt, nle, ord, in encoding order); second index is the same
   prefix column as sse_op_table1. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2643
/* Translate one SSE/MMX instruction.  'b' is the opcode byte that
   followed 0x0f, 'pc_start' the guest address of the instruction's
   first byte (used to report exceptions), and 'rex_r' the REX.R
   contribution already shifted for the modrm 'reg' field.
   Raises #NM on CR0.TS, #UD on CR0.EM or missing OSFXSR for SSE. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 selects the mandatory-prefix column of sse_op_table1:
       0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* opcodes <= 0x5f (and shufps/cmpps) are SSE even without prefix;
       the rest are MMX unless a mandatory prefix is present */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    /* REX.R only extends the register number for XMM regs; MMX regs
       are limited to mm0-mm7 */
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* hand-decoded opcodes: fold the prefix selector into b so a
           single switch can distinguish all prefix variants */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* upper qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* register form is not defined for the store direction */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            /* materialize the 8-bit immediate as a 64-bit shift count
               in the scratch mmx/xmm temporary */
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            /* (b - 1) & 3 maps 0x71/0x72/0x73 to 0/1/2 (w/d/q group) */
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            /* (b >> 8) - 2 selects ss (0) vs sd (1) in sse_op_table3 */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                /* sd variants read a qword, ss variants a dword */
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the cvtt (truncating) group, +8 the cvt group */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry a trailing immediate byte, which matters for
               rip-relative addressing */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* table entry was stored as a GenOpFunc2; it really takes
               the immediate as a third argument */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        /* comiss/comisd update EFLAGS directly */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3241
3242
3243 /* convert one instruction. s->is_jmp is set if the translation must
3244 be stopped. Return the next pc value */
3245 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3246 {
3247 int b, prefixes, aflag, dflag;
3248 int shift, ot;
3249 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3250 target_ulong next_eip, tval;
3251 int rex_w, rex_r;
3252
3253 s->pc = pc_start;
3254 prefixes = 0;
3255 aflag = s->code32;
3256 dflag = s->code32;
3257 s->override = -1;
3258 rex_w = -1;
3259 rex_r = 0;
3260 #ifdef TARGET_X86_64
3261 s->rex_x = 0;
3262 s->rex_b = 0;
3263 x86_64_hregs = 0;
3264 #endif
3265 s->rip_offset = 0; /* for relative ip address */
3266 next_byte:
3267 b = ldub_code(s->pc);
3268 s->pc++;
3269 /* check prefixes */
3270 #ifdef TARGET_X86_64
3271 if (CODE64(s)) {
3272 switch (b) {
3273 case 0xf3:
3274 prefixes |= PREFIX_REPZ;
3275 goto next_byte;
3276 case 0xf2:
3277 prefixes |= PREFIX_REPNZ;
3278 goto next_byte;
3279 case 0xf0:
3280 prefixes |= PREFIX_LOCK;
3281 goto next_byte;
3282 case 0x2e:
3283 s->override = R_CS;
3284 goto next_byte;
3285 case 0x36:
3286 s->override = R_SS;
3287 goto next_byte;
3288 case 0x3e:
3289 s->override = R_DS;
3290 goto next_byte;
3291 case 0x26:
3292 s->override = R_ES;
3293 goto next_byte;
3294 case 0x64:
3295 s->override = R_FS;
3296 goto next_byte;
3297 case 0x65:
3298 s->override = R_GS;
3299 goto next_byte;
3300 case 0x66:
3301 prefixes |= PREFIX_DATA;
3302 goto next_byte;
3303 case 0x67:
3304 prefixes |= PREFIX_ADR;
3305 goto next_byte;
3306 case 0x40 ... 0x4f:
3307 /* REX prefix */
3308 rex_w = (b >> 3) & 1;
3309 rex_r = (b & 0x4) << 1;
3310 s->rex_x = (b & 0x2) << 2;
3311 REX_B(s) = (b & 0x1) << 3;
3312 x86_64_hregs = 1; /* select uniform byte register addressing */
3313 goto next_byte;
3314 }
3315 if (rex_w == 1) {
3316 /* 0x66 is ignored if rex.w is set */
3317 dflag = 2;
3318 } else {
3319 if (prefixes & PREFIX_DATA)
3320 dflag ^= 1;
3321 }
3322 if (!(prefixes & PREFIX_ADR))
3323 aflag = 2;
3324 } else
3325 #endif
3326 {
3327 switch (b) {
3328 case 0xf3:
3329 prefixes |= PREFIX_REPZ;
3330 goto next_byte;
3331 case 0xf2:
3332 prefixes |= PREFIX_REPNZ;
3333 goto next_byte;
3334 case 0xf0:
3335 prefixes |= PREFIX_LOCK;
3336 goto next_byte;
3337 case 0x2e:
3338 s->override = R_CS;
3339 goto next_byte;
3340 case 0x36:
3341 s->override = R_SS;
3342 goto next_byte;
3343 case 0x3e:
3344 s->override = R_DS;
3345 goto next_byte;
3346 case 0x26:
3347 s->override = R_ES;
3348 goto next_byte;
3349 case 0x64:
3350 s->override = R_FS;
3351 goto next_byte;
3352 case 0x65:
3353 s->override = R_GS;
3354 goto next_byte;
3355 case 0x66:
3356 prefixes |= PREFIX_DATA;
3357 goto next_byte;
3358 case 0x67:
3359 prefixes |= PREFIX_ADR;
3360 goto next_byte;
3361 }
3362 if (prefixes & PREFIX_DATA)
3363 dflag ^= 1;
3364 if (prefixes & PREFIX_ADR)
3365 aflag ^= 1;
3366 }
3367
3368 s->prefix = prefixes;
3369 s->aflag = aflag;
3370 s->dflag = dflag;
3371
3372 /* lock generation */
3373 if (prefixes & PREFIX_LOCK)
3374 gen_op_lock();
3375
3376 /* now check op code */
3377 reswitch:
3378 switch(b) {
3379 case 0x0f:
3380 /**************************/
3381 /* extended op code */
3382 b = ldub_code(s->pc++) | 0x100;
3383 goto reswitch;
3384
3385 /**************************/
3386 /* arith & logic */
3387 case 0x00 ... 0x05:
3388 case 0x08 ... 0x0d:
3389 case 0x10 ... 0x15:
3390 case 0x18 ... 0x1d:
3391 case 0x20 ... 0x25:
3392 case 0x28 ... 0x2d:
3393 case 0x30 ... 0x35:
3394 case 0x38 ... 0x3d:
3395 {
3396 int op, f, val;
3397 op = (b >> 3) & 7;
3398 f = (b >> 1) & 3;
3399
3400 if ((b & 1) == 0)
3401 ot = OT_BYTE;
3402 else
3403 ot = dflag + OT_WORD;
3404
3405 switch(f) {
3406 case 0: /* OP Ev, Gv */
3407 modrm = ldub_code(s->pc++);
3408 reg = ((modrm >> 3) & 7) | rex_r;
3409 mod = (modrm >> 6) & 3;
3410 rm = (modrm & 7) | REX_B(s);
3411 if (mod != 3) {
3412 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3413 opreg = OR_TMP0;
3414 } else if (op == OP_XORL && rm == reg) {
3415 xor_zero:
3416 /* xor reg, reg optimisation */
3417 gen_op_movl_T0_0();
3418 s->cc_op = CC_OP_LOGICB + ot;
3419 gen_op_mov_reg_T0(ot, reg);
3420 gen_op_update1_cc();
3421 break;
3422 } else {
3423 opreg = rm;
3424 }
3425 gen_op_mov_TN_reg(ot, 1, reg);
3426 gen_op(s, op, ot, opreg);
3427 break;
3428 case 1: /* OP Gv, Ev */
3429 modrm = ldub_code(s->pc++);
3430 mod = (modrm >> 6) & 3;
3431 reg = ((modrm >> 3) & 7) | rex_r;
3432 rm = (modrm & 7) | REX_B(s);
3433 if (mod != 3) {
3434 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3435 gen_op_ld_T1_A0(ot + s->mem_index);
3436 } else if (op == OP_XORL && rm == reg) {
3437 goto xor_zero;
3438 } else {
3439 gen_op_mov_TN_reg(ot, 1, rm);
3440 }
3441 gen_op(s, op, ot, reg);
3442 break;
3443 case 2: /* OP A, Iv */
3444 val = insn_get(s, ot);
3445 gen_op_movl_T1_im(val);
3446 gen_op(s, op, ot, OR_EAX);
3447 break;
3448 }
3449 }
3450 break;
3451
3452 case 0x80: /* GRP1 */
3453 case 0x81:
3454 case 0x82:
3455 case 0x83:
3456 {
3457 int val;
3458
3459 if ((b & 1) == 0)
3460 ot = OT_BYTE;
3461 else
3462 ot = dflag + OT_WORD;
3463
3464 modrm = ldub_code(s->pc++);
3465 mod = (modrm >> 6) & 3;
3466 rm = (modrm & 7) | REX_B(s);
3467 op = (modrm >> 3) & 7;
3468
3469 if (mod != 3) {
3470 if (b == 0x83)
3471 s->rip_offset = 1;
3472 else
3473 s->rip_offset = insn_const_size(ot);
3474 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3475 opreg = OR_TMP0;
3476 } else {
3477 opreg = rm;
3478 }
3479
3480 switch(b) {
3481 default:
3482 case 0x80:
3483 case 0x81:
3484 case 0x82:
3485 val = insn_get(s, ot);
3486 break;
3487 case 0x83:
3488 val = (int8_t)insn_get(s, OT_BYTE);
3489 break;
3490 }
3491 gen_op_movl_T1_im(val);
3492 gen_op(s, op, ot, opreg);
3493 }
3494 break;
3495
3496 /**************************/
3497 /* inc, dec, and other misc arith */
3498 case 0x40 ... 0x47: /* inc Gv */
3499 ot = dflag ? OT_LONG : OT_WORD;
3500 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3501 break;
3502 case 0x48 ... 0x4f: /* dec Gv */
3503 ot = dflag ? OT_LONG : OT_WORD;
3504 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3505 break;
3506 case 0xf6: /* GRP3 */
3507 case 0xf7:
3508 if ((b & 1) == 0)
3509 ot = OT_BYTE;
3510 else
3511 ot = dflag + OT_WORD;
3512
3513 modrm = ldub_code(s->pc++);
3514 mod = (modrm >> 6) & 3;
3515 rm = (modrm & 7) | REX_B(s);
3516 op = (modrm >> 3) & 7;
3517 if (mod != 3) {
3518 if (op == 0)
3519 s->rip_offset = insn_const_size(ot);
3520 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3521 gen_op_ld_T0_A0(ot + s->mem_index);
3522 } else {
3523 gen_op_mov_TN_reg(ot, 0, rm);
3524 }
3525
3526 switch(op) {
3527 case 0: /* test */
3528 val = insn_get(s, ot);
3529 gen_op_movl_T1_im(val);
3530 gen_op_testl_T0_T1_cc();
3531 s->cc_op = CC_OP_LOGICB + ot;
3532 break;
3533 case 2: /* not */
3534 gen_op_notl_T0();
3535 if (mod != 3) {
3536 gen_op_st_T0_A0(ot + s->mem_index);
3537 } else {
3538 gen_op_mov_reg_T0(ot, rm);
3539 }
3540 break;
3541 case 3: /* neg */
3542 gen_op_negl_T0();
3543 if (mod != 3) {
3544 gen_op_st_T0_A0(ot + s->mem_index);
3545 } else {
3546 gen_op_mov_reg_T0(ot, rm);
3547 }
3548 gen_op_update_neg_cc();
3549 s->cc_op = CC_OP_SUBB + ot;
3550 break;
3551 case 4: /* mul */
3552 switch(ot) {
3553 case OT_BYTE:
3554 gen_op_mulb_AL_T0();
3555 s->cc_op = CC_OP_MULB;
3556 break;
3557 case OT_WORD:
3558 gen_op_mulw_AX_T0();
3559 s->cc_op = CC_OP_MULW;
3560 break;
3561 default:
3562 case OT_LONG:
3563 gen_op_mull_EAX_T0();
3564 s->cc_op = CC_OP_MULL;
3565 break;
3566 #ifdef TARGET_X86_64
3567 case OT_QUAD:
3568 gen_op_mulq_EAX_T0();
3569 s->cc_op = CC_OP_MULQ;
3570 break;
3571 #endif
3572 }
3573 break;
3574 case 5: /* imul */
3575 switch(ot) {
3576 case OT_BYTE:
3577 gen_op_imulb_AL_T0();
3578 s->cc_op = CC_OP_MULB;
3579 break;
3580 case OT_WORD:
3581 gen_op_imulw_AX_T0();
3582 s->cc_op = CC_OP_MULW;
3583 break;
3584 default:
3585 case OT_LONG:
3586 gen_op_imull_EAX_T0();
3587 s->cc_op = CC_OP_MULL;
3588 break;
3589 #ifdef TARGET_X86_64
3590 case OT_QUAD:
3591 gen_op_imulq_EAX_T0();
3592 s->cc_op = CC_OP_MULQ;
3593 break;
3594 #endif
3595 }
3596 break;
3597 case 6: /* div */
3598 switch(ot) {
3599 case OT_BYTE:
3600 gen_jmp_im(pc_start - s->cs_base);
3601 gen_op_divb_AL_T0();
3602 break;
3603 case OT_WORD:
3604 gen_jmp_im(pc_start - s->cs_base);
3605 gen_op_divw_AX_T0();
3606 break;
3607 default:
3608 case OT_LONG:
3609 gen_jmp_im(pc_start - s->cs_base);
3610 #ifdef MACRO_TEST
3611 /* XXX: this is just a test */
3612 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3613 #else
3614 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3615 #endif
3616 break;
3617 #ifdef TARGET_X86_64
3618 case OT_QUAD:
3619 gen_jmp_im(pc_start - s->cs_base);
3620 gen_op_divq_EAX_T0();
3621 break;
3622 #endif
3623 }
3624 break;
3625 case 7: /* idiv */
3626 switch(ot) {
3627 case OT_BYTE:
3628 gen_jmp_im(pc_start - s->cs_base);
3629 gen_op_idivb_AL_T0();
3630 break;
3631 case OT_WORD:
3632 gen_jmp_im(pc_start - s->cs_base);
3633 gen_op_idivw_AX_T0();
3634 break;
3635 default:
3636 case OT_LONG:
3637 gen_jmp_im(pc_start - s->cs_base);
3638 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3639 break;
3640 #ifdef TARGET_X86_64
3641 case OT_QUAD:
3642 gen_jmp_im(pc_start - s->cs_base);
3643 gen_op_idivq_EAX_T0();
3644 break;
3645 #endif
3646 }
3647 break;
3648 default:
3649 goto illegal_op;
3650 }
3651 break;
3652
3653 case 0xfe: /* GRP4 */
3654 case 0xff: /* GRP5 */
3655 if ((b & 1) == 0)
3656 ot = OT_BYTE;
3657 else
3658 ot = dflag + OT_WORD;
3659
3660 modrm = ldub_code(s->pc++);
3661 mod = (modrm >> 6) & 3;
3662 rm = (modrm & 7) | REX_B(s);
3663 op = (modrm >> 3) & 7;
3664 if (op >= 2 && b == 0xfe) {
3665 goto illegal_op;
3666 }
3667 if (CODE64(s)) {
3668 if (op == 2 || op == 4) {
3669 /* operand size for jumps is 64 bit */
3670 ot = OT_QUAD;
3671 } else if (op == 3 || op == 5) {
3672 /* for call calls, the operand is 16 or 32 bit, even
3673 in long mode */
3674 ot = dflag ? OT_LONG : OT_WORD;
3675 } else if (op == 6) {
3676 /* default push size is 64 bit */
3677 ot = dflag ? OT_QUAD : OT_WORD;
3678 }
3679 }
3680 if (mod != 3) {
3681 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3682 if (op >= 2 && op != 3 && op != 5)
3683 gen_op_ld_T0_A0(ot + s->mem_index);
3684 } else {
3685 gen_op_mov_TN_reg(ot, 0, rm);
3686 }
3687
3688 switch(op) {
3689 case 0: /* inc Ev */
3690 if (mod != 3)
3691 opreg = OR_TMP0;
3692 else
3693 opreg = rm;
3694 gen_inc(s, ot, opreg, 1);
3695 break;
3696 case 1: /* dec Ev */
3697 if (mod != 3)
3698 opreg = OR_TMP0;
3699 else
3700 opreg = rm;
3701 gen_inc(s, ot, opreg, -1);
3702 break;
3703 case 2: /* call Ev */
3704 /* XXX: optimize if memory (no 'and' is necessary) */
3705 if (s->dflag == 0)
3706 gen_op_andl_T0_ffff();
3707 next_eip = s->pc - s->cs_base;
3708 gen_movtl_T1_im(next_eip);
3709 gen_push_T1(s);
3710 gen_op_jmp_T0();
3711 gen_eob(s);
3712 break;
3713 case 3: /* lcall Ev */
3714 gen_op_ld_T1_A0(ot + s->mem_index);
3715 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3716 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3717 do_lcall:
3718 if (s->pe && !s->vm86) {
3719 if (s->cc_op != CC_OP_DYNAMIC)
3720 gen_op_set_cc_op(s->cc_op);
3721 gen_jmp_im(pc_start - s->cs_base);
3722 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3723 } else {
3724 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3725 }
3726 gen_eob(s);
3727 break;
3728 case 4: /* jmp Ev */
3729 if (s->dflag == 0)
3730 gen_op_andl_T0_ffff();
3731 gen_op_jmp_T0();
3732 gen_eob(s);
3733 break;
3734 case 5: /* ljmp Ev */
3735 gen_op_ld_T1_A0(ot + s->mem_index);
3736 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3737 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3738 do_ljmp:
3739 if (s->pe && !s->vm86) {
3740 if (s->cc_op != CC_OP_DYNAMIC)
3741 gen_op_set_cc_op(s->cc_op);
3742 gen_jmp_im(pc_start - s->cs_base);
3743 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3744 } else {
3745 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3746 gen_op_movl_T0_T1();
3747 gen_op_jmp_T0();
3748 }
3749 gen_eob(s);
3750 break;
3751 case 6: /* push Ev */
3752 gen_push_T0(s);
3753 break;
3754 default:
3755 goto illegal_op;
3756 }
3757 break;
3758
/* TEST: compute AND of the operands for the flags only; no result
   is written back to any register or memory operand. */
3759 case 0x84: /* test Ev, Gv */
3760 case 0x85:
3761 if ((b & 1) == 0)
3762 ot = OT_BYTE;
3763 else
3764 ot = dflag + OT_WORD;
3765
3766 modrm = ldub_code(s->pc++);
3767 mod = (modrm >> 6) & 3;
3768 rm = (modrm & 7) | REX_B(s);
3769 reg = ((modrm >> 3) & 7) | rex_r;
3770
/* load Ev into T0, Gv into T1, then emit the flag-setting AND */
3771 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3772 gen_op_mov_TN_reg(ot, 1, reg);
3773 gen_op_testl_T0_T1_cc();
3774 s->cc_op = CC_OP_LOGICB + ot;
3775 break;
3776
/* TEST with an immediate against the accumulator (AL/AX/EAX/RAX). */
3777 case 0xa8: /* test eAX, Iv */
3778 case 0xa9:
3779 if ((b & 1) == 0)
3780 ot = OT_BYTE;
3781 else
3782 ot = dflag + OT_WORD;
3783 val = insn_get(s, ot);
3784
3785 gen_op_mov_TN_reg(ot, 0, OR_EAX)
3786 gen_op_movl_T1_im(val);
3787 gen_op_testl_T0_T1_cc();
3788 s->cc_op = CC_OP_LOGICB + ot;
3789 break;
3790
3791 case 0x98: /* CWDE/CBW */
3792 #ifdef TARGET_X86_64
3793 if (dflag == 2) {
3794 gen_op_movslq_RAX_EAX();
3795 } else
3796 #endif
3797 if (dflag == 1)
3798 gen_op_movswl_EAX_AX();
3799 else
3800 gen_op_movsbw_AX_AL();
3801 break;
3802 case 0x99: /* CDQ/CWD */
3803 #ifdef TARGET_X86_64
3804 if (dflag == 2) {
3805 gen_op_movsqo_RDX_RAX();
3806 } else
3807 #endif
3808 if (dflag == 1)
3809 gen_op_movslq_EDX_EAX();
3810 else
3811 gen_op_movswl_DX_AX();
3812 break;
3813 case 0x1af: /* imul Gv, Ev */
3814 case 0x69: /* imul Gv, Ev, I */
3815 case 0x6b:
3816 ot = dflag + OT_WORD;
3817 modrm = ldub_code(s->pc++);
3818 reg = ((modrm >> 3) & 7) | rex_r;
3819 if (b == 0x69)
3820 s->rip_offset = insn_const_size(ot);
3821 else if (b == 0x6b)
3822 s->rip_offset = 1;
3823 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3824 if (b == 0x69) {
3825 val = insn_get(s, ot);
3826 gen_op_movl_T1_im(val);
3827 } else if (b == 0x6b) {
3828 val = (int8_t)insn_get(s, OT_BYTE);
3829 gen_op_movl_T1_im(val);
3830 } else {
3831 gen_op_mov_TN_reg(ot, 1, reg);
3832 }
3833
3834 #ifdef TARGET_X86_64
3835 if (ot == OT_QUAD) {
3836 gen_op_imulq_T0_T1();
3837 } else
3838 #endif
3839 if (ot == OT_LONG) {
3840 gen_op_imull_T0_T1();
3841 } else {
3842 gen_op_imulw_T0_T1();
3843 }
3844 gen_op_mov_reg_T0(ot, reg);
3845 s->cc_op = CC_OP_MULB + ot;
3846 break;
3847 case 0x1c0:
3848 case 0x1c1: /* xadd Ev, Gv */
3849 if ((b & 1) == 0)
3850 ot = OT_BYTE;
3851 else
3852 ot = dflag + OT_WORD;
3853 modrm = ldub_code(s->pc++);
3854 reg = ((modrm >> 3) & 7) | rex_r;
3855 mod = (modrm >> 6) & 3;
3856 if (mod == 3) {
3857 rm = (modrm & 7) | REX_B(s);
3858 gen_op_mov_TN_reg(ot, 0, reg);
3859 gen_op_mov_TN_reg(ot, 1, rm);
3860 gen_op_addl_T0_T1();
3861 gen_op_mov_reg_T1(ot, reg);
3862 gen_op_mov_reg_T0(ot, rm);
3863 } else {
3864 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3865 gen_op_mov_TN_reg(ot, 0, reg);
3866 gen_op_ld_T1_A0(ot + s->mem_index);
3867 gen_op_addl_T0_T1();
3868 gen_op_st_T0_A0(ot + s->mem_index);
3869 gen_op_mov_reg_T1(ot, reg);
3870 }
3871 gen_op_update2_cc();
3872 s->cc_op = CC_OP_ADDB + ot;
3873 break;
3874 case 0x1b0:
3875 case 0x1b1: /* cmpxchg Ev, Gv */
3876 if ((b & 1) == 0)
3877 ot = OT_BYTE;
3878 else
3879 ot = dflag + OT_WORD;
3880 modrm = ldub_code(s->pc++);
3881 reg = ((modrm >> 3) & 7) | rex_r;
3882 mod = (modrm >> 6) & 3;
3883 gen_op_mov_TN_reg(ot, 1, reg);
3884 if (mod == 3) {
3885 rm = (modrm & 7) | REX_B(s);
3886 gen_op_mov_TN_reg(ot, 0, rm);
3887 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3888 gen_op_mov_reg_T0(ot, rm);
3889 } else {
3890 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3891 gen_op_ld_T0_A0(ot + s->mem_index);
3892 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3893 }
3894 s->cc_op = CC_OP_SUBB + ot;
3895 break;
3896 case 0x1c7: /* cmpxchg8b */
3897 modrm = ldub_code(s->pc++);
3898 mod = (modrm >> 6) & 3;
/* only the memory form with reg field /1 is valid for cmpxchg8b */
3899 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3900 goto illegal_op;
/* sync EIP and flags before the helper: it may fault on the
   memory access and needs a precise state */
3901 gen_jmp_im(pc_start - s->cs_base);
3902 if (s->cc_op != CC_OP_DYNAMIC)
3903 gen_op_set_cc_op(s->cc_op);
3904 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3905 gen_op_cmpxchg8b();
/* helper computes ZF itself; flags are now live in EFLAGS.
   NOTE(review): no CPUID CX8 feature gate is visible here — confirm
   whether this build intends to accept cmpxchg8b unconditionally. */
3906 s->cc_op = CC_OP_EFLAGS;
3907 break;
3908
3909 /**************************/
3910 /* push/pop */
3911 case 0x50 ... 0x57: /* push */
3912 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3913 gen_push_T0(s);
3914 break;
3915 case 0x58 ... 0x5f: /* pop */
3916 if (CODE64(s)) {
3917 ot = dflag ? OT_QUAD : OT_WORD;
3918 } else {
3919 ot = dflag + OT_WORD;
3920 }
3921 gen_pop_T0(s);
3922 /* NOTE: order is important for pop %sp */
3923 gen_pop_update(s);
3924 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3925 break;
3926 case 0x60: /* pusha */
3927 if (CODE64(s))
3928 goto illegal_op;
3929 gen_pusha(s);
3930 break;
3931 case 0x61: /* popa */
3932 if (CODE64(s))
3933 goto illegal_op;
3934 gen_popa(s);
3935 break;
3936 case 0x68: /* push Iv */
3937 case 0x6a:
3938 if (CODE64(s)) {
3939 ot = dflag ? OT_QUAD : OT_WORD;
3940 } else {
3941 ot = dflag + OT_WORD;
3942 }
3943 if (b == 0x68)
3944 val = insn_get(s, ot);
3945 else
3946 val = (int8_t)insn_get(s, OT_BYTE);
3947 gen_op_movl_T0_im(val);
3948 gen_push_T0(s);
3949 break;
3950 case 0x8f: /* pop Ev */
3951 if (CODE64(s)) {
3952 ot = dflag ? OT_QUAD : OT_WORD;
3953 } else {
3954 ot = dflag + OT_WORD;
3955 }
3956 modrm = ldub_code(s->pc++);
3957 mod = (modrm >> 6) & 3;
3958 gen_pop_T0(s);
3959 if (mod == 3) {
3960 /* NOTE: order is important for pop %sp */
3961 gen_pop_update(s);
3962 rm = (modrm & 7) | REX_B(s);
3963 gen_op_mov_reg_T0(ot, rm);
3964 } else {
3965 /* NOTE: order is important too for MMU exceptions */
3966 s->popl_esp_hack = 1 << ot;
3967 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3968 s->popl_esp_hack = 0;
3969 gen_pop_update(s);
3970 }
3971 break;
3972 case 0xc8: /* enter */
3973 {
3974 int level;
3975 val = lduw_code(s->pc);
3976 s->pc += 2;
3977 level = ldub_code(s->pc++);
3978 gen_enter(s, val, level);
3979 }
3980 break;
3981 case 0xc9: /* leave */
3982 /* XXX: exception not precise (ESP is updated before potential exception) */
3983 if (CODE64(s)) {
3984 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
3985 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3986 } else if (s->ss32) {
3987 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3988 gen_op_mov_reg_T0(OT_LONG, R_ESP);
3989 } else {
3990 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
3991 gen_op_mov_reg_T0(OT_WORD, R_ESP);
3992 }
3993 gen_pop_T0(s);
3994 if (CODE64(s)) {
3995 ot = dflag ? OT_QUAD : OT_WORD;
3996 } else {
3997 ot = dflag + OT_WORD;
3998 }
3999 gen_op_mov_reg_T0(ot, R_EBP);
4000 gen_pop_update(s);
4001 break;
4002 case 0x06: /* push es */
4003 case 0x0e: /* push cs */
4004 case 0x16: /* push ss */
4005 case 0x1e: /* push ds */
4006 if (CODE64(s))
4007 goto illegal_op;
4008 gen_op_movl_T0_seg(b >> 3);
4009 gen_push_T0(s);
4010 break;
4011 case 0x1a0: /* push fs */
4012 case 0x1a8: /* push gs */
4013 gen_op_movl_T0_seg((b >> 3) & 7);
4014 gen_push_T0(s);
4015 break;
4016 case 0x07: /* pop es */
4017 case 0x17: /* pop ss */
4018 case 0x1f: /* pop ds */
4019 if (CODE64(s))
4020 goto illegal_op;
4021 reg = b >> 3;
4022 gen_pop_T0(s);
4023 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4024 gen_pop_update(s);
4025 if (reg == R_SS) {
4026 /* if reg == SS, inhibit interrupts/trace. */
4027 /* If several instructions disable interrupts, only the
4028 _first_ does it */
4029 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4030 gen_op_set_inhibit_irq();
4031 s->tf = 0;
4032 }
4033 if (s->is_jmp) {
4034 gen_jmp_im(s->pc - s->cs_base);
4035 gen_eob(s);
4036 }
4037 break;
4038 case 0x1a1: /* pop fs */
4039 case 0x1a9: /* pop gs */
4040 gen_pop_T0(s);
4041 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4042 gen_pop_update(s);
4043 if (s->is_jmp) {
4044 gen_jmp_im(s->pc - s->cs_base);
4045 gen_eob(s);
4046 }
4047 break;
4048
4049 /**************************/
4050 /* mov */
4051 case 0x88:
4052 case 0x89: /* mov Gv, Ev */
4053 if ((b & 1) == 0)
4054 ot = OT_BYTE;
4055 else
4056 ot = dflag + OT_WORD;
4057 modrm = ldub_code(s->pc++);
4058 reg = ((modrm >> 3) & 7) | rex_r;
4059
4060 /* generate a generic store */
4061 gen_ldst_modrm(s, modrm, ot, reg, 1);
4062 break;
4063 case 0xc6:
4064 case 0xc7: /* mov Ev, Iv */
4065 if ((b & 1) == 0)
4066 ot = OT_BYTE;
4067 else
4068 ot = dflag + OT_WORD;
4069 modrm = ldub_code(s->pc++);
4070 mod = (modrm >> 6) & 3;
4071 if (mod != 3) {
4072 s->rip_offset = insn_const_size(ot);
4073 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4074 }
4075 val = insn_get(s, ot);
4076 gen_op_movl_T0_im(val);
4077 if (mod != 3)
4078 gen_op_st_T0_A0(ot + s->mem_index);
4079 else
4080 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4081 break;
4082 case 0x8a:
4083 case 0x8b: /* mov Ev, Gv */
4084 if ((b & 1) == 0)
4085 ot = OT_BYTE;
4086 else
4087 ot = OT_WORD + dflag;
4088 modrm = ldub_code(s->pc++);
4089 reg = ((modrm >> 3) & 7) | rex_r;
4090
4091 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4092 gen_op_mov_reg_T0(ot, reg);
4093 break;
/* MOV Sreg, Ev: load a segment register from a 16-bit operand. */
4094 case 0x8e: /* mov seg, Gv */
4095 modrm = ldub_code(s->pc++);
4096 reg = (modrm >> 3) & 7;
/* only ES..GS are valid destinations, and loading CS is #UD */
4097 if (reg >= 6 || reg == R_CS)
4098 goto illegal_op;
4099 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4100 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4101 if (reg == R_SS) {
4102 /* if reg == SS, inhibit interrupts/trace */
4103 /* If several instructions disable interrupts, only the
4104 _first_ does it */
4105 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4106 gen_op_set_inhibit_irq();
4107 s->tf = 0;
4108 }
/* in protected mode the segment load may have changed CPU state in a
   way that invalidates the rest of this TB, so end it here */
4109 if (s->is_jmp) {
4110 gen_jmp_im(s->pc - s->cs_base);
4111 gen_eob(s);
4112 }
4113 break;
4114 case 0x8c: /* mov Gv, seg */
4115 modrm = ldub_code(s->pc++);
4116 reg = (modrm >> 3) & 7;
4117 mod = (modrm >> 6) & 3;
4118 if (reg >= 6)
4119 goto illegal_op;
4120 gen_op_movl_T0_seg(reg);
4121 if (mod == 3)
4122 ot = OT_WORD + dflag;
4123 else
4124 ot = OT_WORD;
4125 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4126 break;
4127
4128 case 0x1b6: /* movzbS Gv, Eb */
4129 case 0x1b7: /* movzwS Gv, Eb */
4130 case 0x1be: /* movsbS Gv, Eb */
4131 case 0x1bf: /* movswS Gv, Eb */
4132 {
4133 int d_ot;
4134 /* d_ot is the size of destination */
4135 d_ot = dflag + OT_WORD;
4136 /* ot is the size of source */
4137 ot = (b & 1) + OT_BYTE;
4138 modrm = ldub_code(s->pc++);
4139 reg = ((modrm >> 3) & 7) | rex_r;
4140 mod = (modrm >> 6) & 3;
4141 rm = (modrm & 7) | REX_B(s);
4142
4143 if (mod == 3) {
4144 gen_op_mov_TN_reg(ot, 0, rm);
4145 switch(ot | (b & 8)) {
4146 case OT_BYTE:
4147 gen_op_movzbl_T0_T0();
4148 break;
4149 case OT_BYTE | 8:
4150 gen_op_movsbl_T0_T0();
4151 break;
4152 case OT_WORD:
4153 gen_op_movzwl_T0_T0();
4154 break;
4155 default:
4156 case OT_WORD | 8:
4157 gen_op_movswl_T0_T0();
4158 break;
4159 }
4160 gen_op_mov_reg_T0(d_ot, reg);
4161 } else {
4162 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4163 if (b & 8) {
4164 gen_op_lds_T0_A0(ot + s->mem_index);
4165 } else {
4166 gen_op_ldu_T0_A0(ot + s->mem_index);
4167 }
4168 gen_op_mov_reg_T0(d_ot, reg);
4169 }
4170 }
4171 break;
4172
/* LEA: compute the effective address of the memory operand and store
   it in the destination register; no memory access is performed. */
4173 case 0x8d: /* lea */
4174 ot = dflag + OT_WORD;
4175 modrm = ldub_code(s->pc++);
4176 mod = (modrm >> 6) & 3;
/* LEA requires a memory-form operand; register form is #UD */
4177 if (mod == 3)
4178 goto illegal_op;
4179 reg = ((modrm >> 3) & 7) | rex_r;
4180 /* we must ensure that no segment is added */
/* temporarily clear override and addseg so gen_lea_modrm produces the
   raw offset without any segment base folded in, then restore addseg */
4181 s->override = -1;
4182 val = s->addseg;
4183 s->addseg = 0;
4184 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4185 s->addseg = val;
4186 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4187 break;
4188
4189 case 0xa0: /* mov EAX, Ov */
4190 case 0xa1:
4191 case 0xa2: /* mov Ov, EAX */
4192 case 0xa3:
4193 {
4194 target_ulong offset_addr;
4195
4196 if ((b & 1) == 0)
4197 ot = OT_BYTE;
4198 else
4199 ot = dflag + OT_WORD;
4200 #ifdef TARGET_X86_64
4201 if (s->aflag == 2) {
4202 offset_addr = ldq_code(s->pc);
4203 s->pc += 8;
4204 gen_op_movq_A0_im(offset_addr);
4205 } else
4206 #endif
4207 {
4208 if (s->aflag) {
4209 offset_addr = insn_get(s, OT_LONG);
4210 } else {
4211 offset_addr = insn_get(s, OT_WORD);
4212 }
4213 gen_op_movl_A0_im(offset_addr);
4214 }
4215 gen_add_A0_ds_seg(s);
4216 if ((b & 2) == 0) {
4217 gen_op_ld_T0_A0(ot + s->mem_index);
4218 gen_op_mov_reg_T0(ot, R_EAX);
4219 } else {
4220 gen_op_mov_TN_reg(ot, 0, R_EAX);
4221 gen_op_st_T0_A0(ot + s->mem_index);
4222 }
4223 }
4224 break;
4225 case 0xd7: /* xlat */
4226 #ifdef TARGET_X86_64
4227 if (s->aflag == 2) {
4228 gen_op_movq_A0_reg(R_EBX);
4229 gen_op_addq_A0_AL();
4230 } else
4231 #endif
4232 {
4233 gen_op_movl_A0_reg(R_EBX);
4234 gen_op_addl_A0_AL();
4235 if (s->aflag == 0)
4236 gen_op_andl_A0_ffff();
4237 }
4238 gen_add_A0_ds_seg(s);
4239 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4240 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4241 break;
4242 case 0xb0 ... 0xb7: /* mov R, Ib */
4243 val = insn_get(s, OT_BYTE);
4244 gen_op_movl_T0_im(val);
4245 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4246 break;
4247 case 0xb8 ... 0xbf: /* mov R, Iv */
4248 #ifdef TARGET_X86_64
4249 if (dflag == 2) {
4250 uint64_t tmp;
4251 /* 64 bit case */
4252 tmp = ldq_code(s->pc);
4253 s->pc += 8;
4254 reg = (b & 7) | REX_B(s);
4255 gen_movtl_T0_im(tmp);
4256 gen_op_mov_reg_T0(OT_QUAD, reg);
4257 } else
4258 #endif
4259 {
4260 ot = dflag ? OT_LONG : OT_WORD;
4261 val = insn_get(s, ot);
4262 reg = (b & 7) | REX_B(s);
4263 gen_op_movl_T0_im(val);
4264 gen_op_mov_reg_T0(ot, reg);
4265 }
4266 break;
4267
4268 case 0x91 ... 0x97: /* xchg R, EAX */
4269 ot = dflag + OT_WORD;
4270 reg = (b & 7) | REX_B(s);
4271 rm = R_EAX;
4272 goto do_xchg_reg;
4273 case 0x86:
4274 case 0x87: /* xchg Ev, Gv */
4275 if ((b & 1) == 0)
4276 ot = OT_BYTE;
4277 else
4278 ot = dflag + OT_WORD;
4279 modrm = ldub_code(s->pc++);
4280 reg = ((modrm >> 3) & 7) | rex_r;
4281 mod = (modrm >> 6) & 3;
4282 if (mod == 3) {
4283 rm = (modrm & 7) | REX_B(s);
4284 do_xchg_reg:
4285 gen_op_mov_TN_reg(ot, 0, reg);
4286 gen_op_mov_TN_reg(ot, 1, rm);
4287 gen_op_mov_reg_T0(ot, rm);
4288 gen_op_mov_reg_T1(ot, reg);
4289 } else {
4290 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4291 gen_op_mov_TN_reg(ot, 0, reg);
4292 /* for xchg, lock is implicit */
4293 if (!(prefixes & PREFIX_LOCK))
4294 gen_op_lock();
4295 gen_op_ld_T1_A0(ot + s->mem_index);
4296 gen_op_st_T0_A0(ot + s->mem_index);
4297 if (!(prefixes & PREFIX_LOCK))
4298 gen_op_unlock();
4299 gen_op_mov_reg_T1(ot, reg);
4300 }
4301 break;
4302 case 0xc4: /* les Gv */
4303 if (CODE64(s))
4304 goto illegal_op;
4305 op = R_ES;
4306 goto do_lxx;
4307 case 0xc5: /* lds Gv */
4308 if (CODE64(s))
4309 goto illegal_op;
4310 op = R_DS;
4311 goto do_lxx;
4312 case 0x1b2: /* lss Gv */
4313 op = R_SS;
4314 goto do_lxx;
4315 case 0x1b4: /* lfs Gv */
4316 op = R_FS;
4317 goto do_lxx;
4318 case 0x1b5: /* lgs Gv */
4319 op = R_GS;
4320 do_lxx:
4321 ot = dflag ? OT_LONG : OT_WORD;
4322 modrm = ldub_code(s->pc++);
4323 reg = ((modrm >> 3) & 7) | rex_r;
4324 mod = (modrm >> 6) & 3;
4325 if (mod == 3)
4326 goto illegal_op;
4327 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4328 gen_op_ld_T1_A0(ot + s->mem_index);
4329 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4330 /* load the segment first to handle exceptions properly */
4331 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4332 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4333 /* then put the data */
4334 gen_op_mov_reg_T1(ot, reg);
4335 if (s->is_jmp) {
4336 gen_jmp_im(s->pc - s->cs_base);
4337 gen_eob(s);
4338 }
4339 break;
4340
4341 /************************/
4342 /* shifts */
4343 case 0xc0:
4344 case 0xc1:
4345 /* shift Ev,Ib */
4346 shift = 2;
4347 grp2:
4348 {
4349 if ((b & 1) == 0)
4350 ot = OT_BYTE;
4351 else
4352 ot = dflag + OT_WORD;
4353
4354 modrm = ldub_code(s->pc++);
4355 mod = (modrm >> 6) & 3;
4356 op = (modrm >> 3) & 7;
4357
4358 if (mod != 3) {
4359 if (shift == 2) {
4360 s->rip_offset = 1;
4361 }
4362 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4363 opreg = OR_TMP0;
4364 } else {
4365 opreg = (modrm & 7) | REX_B(s);
4366 }
4367
4368 /* simpler op */
4369 if (shift == 0) {
4370 gen_shift(s, op, ot, opreg, OR_ECX);
4371 } else {
4372 if (shift == 2) {
4373 shift = ldub_code(s->pc++);
4374 }
4375 gen_shifti(s, op, ot, opreg, shift);
4376 }
4377 }
4378 break;
4379 case 0xd0:
4380 case 0xd1:
4381 /* shift Ev,1 */
4382 shift = 1;
4383 goto grp2;
4384 case 0xd2:
4385 case 0xd3:
4386 /* shift Ev,cl */
4387 shift = 0;
4388 goto grp2;
4389
4390 case 0x1a4: /* shld imm */
4391 op = 0;
4392 shift = 1;
4393 goto do_shiftd;
4394 case 0x1a5: /* shld cl */
4395 op = 0;
4396 shift = 0;
4397 goto do_shiftd;
4398 case 0x1ac: /* shrd imm */
4399 op = 1;
4400 shift = 1;
4401 goto do_shiftd;
4402 case 0x1ad: /* shrd cl */
4403 op = 1;
4404 shift = 0;
4405 do_shiftd:
4406 ot = dflag + OT_WORD;
4407 modrm = ldub_code(s->pc++);
4408 mod = (modrm >> 6) & 3;
4409 rm = (modrm & 7) | REX_B(s);
4410 reg = ((modrm >> 3) & 7) | rex_r;
4411
4412 if (mod != 3) {
4413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4414 gen_op_ld_T0_A0(ot + s->mem_index);
4415 } else {
4416 gen_op_mov_TN_reg(ot, 0, rm);
4417 }
4418 gen_op_mov_TN_reg(ot, 1, reg);
4419
4420 if (shift) {
4421 val = ldub_code(s->pc++);
4422 if (ot == OT_QUAD)
4423 val &= 0x3f;
4424 else
4425 val &= 0x1f;
4426 if (val) {
4427 if (mod == 3)
4428 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4429 else
4430 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4431 if (op == 0 && ot != OT_WORD)
4432 s->cc_op = CC_OP_SHLB + ot;
4433 else
4434 s->cc_op = CC_OP_SARB + ot;
4435 }
4436 } else {
4437 if (s->cc_op != CC_OP_DYNAMIC)
4438 gen_op_set_cc_op(s->cc_op);
4439 if (mod == 3)
4440 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4441 else
4442 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4443 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4444 }
4445 if (mod == 3) {
4446 gen_op_mov_reg_T0(ot, rm);
4447 }
4448 break;
4449
4450 /************************/
4451 /* floats */
4452 case 0xd8 ... 0xdf:
4453 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4454 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4455 /* XXX: what to do if illegal op ? */
4456 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4457 break;
4458 }
4459 modrm = ldub_code(s->pc++);
4460 mod = (modrm >> 6) & 3;
4461 rm = modrm & 7;
4462 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4463 if (mod != 3) {
4464 /* memory op */
4465 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4466 switch(op) {
4467 case 0x00 ... 0x07: /* fxxxs */
4468 case 0x10 ... 0x17: /* fixxxl */
4469 case 0x20 ... 0x27: /* fxxxl */
4470 case 0x30 ... 0x37: /* fixxx */
4471 {
4472 int op1;
4473 op1 = op & 7;
4474
4475 switch(op >> 4) {
4476 case 0:
4477 gen_op_flds_FT0_A0();
4478 break;
4479 case 1:
4480 gen_op_fildl_FT0_A0();
4481 break;
4482 case 2:
4483 gen_op_fldl_FT0_A0();
4484 break;
4485 case 3:
4486 default:
4487 gen_op_fild_FT0_A0();
4488 break;
4489 }
4490
4491 gen_op_fp_arith_ST0_FT0[op1]();
4492 if (op1 == 3) {
4493 /* fcomp needs pop */
4494 gen_op_fpop();
4495 }
4496 }
4497 break;
4498 case 0x08: /* flds */
4499 case 0x0a: /* fsts */
4500 case 0x0b: /* fstps */
4501 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4502 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4503 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4504 switch(op & 7) {
4505 case 0:
4506 switch(op >> 4) {
4507 case 0:
4508 gen_op_flds_ST0_A0();
4509 break;
4510 case 1:
4511 gen_op_fildl_ST0_A0();
4512 break;
4513 case 2:
4514 gen_op_fldl_ST0_A0();
4515 break;
4516 case 3:
4517 default:
4518 gen_op_fild_ST0_A0();
4519 break;
4520 }
4521 break;
4522 case 1:
4523 switch(op >> 4) {
4524 case 1:
4525 gen_op_fisttl_ST0_A0();
4526 break;
4527 case 2:
4528 gen_op_fisttll_ST0_A0();
4529 break;
4530 case 3:
4531 default:
4532 gen_op_fistt_ST0_A0();
4533 }
4534 gen_op_fpop();
4535 break;
4536 default:
4537 switch(op >> 4) {
4538 case 0:
4539 gen_op_fsts_ST0_A0();
4540 break;
4541 case 1:
4542 gen_op_fistl_ST0_A0();
4543 break;
4544 case 2:
4545 gen_op_fstl_ST0_A0();
4546 break;
4547 case 3:
4548 default:
4549 gen_op_fist_ST0_A0();
4550 break;
4551 }
4552 if ((op & 7) == 3)
4553 gen_op_fpop();
4554 break;
4555 }
4556 break;
4557 case 0x0c: /* fldenv mem */
4558 gen_op_fldenv_A0(s->dflag);
4559 break;
4560 case 0x0d: /* fldcw mem */
4561 gen_op_fldcw_A0();
4562 break;
4563 case 0x0e: /* fnstenv mem */
4564 gen_op_fnstenv_A0(s->dflag);
4565 break;
4566 case 0x0f: /* fnstcw mem */
4567 gen_op_fnstcw_A0();
4568 break;
4569 case 0x1d: /* fldt mem */
4570 gen_op_fldt_ST0_A0();
4571 break;
4572 case 0x1f: /* fstpt mem */
4573 gen_op_fstt_ST0_A0();
4574 gen_op_fpop();
4575 break;
4576 case 0x2c: /* frstor mem */
4577 gen_op_frstor_A0(s->dflag);
4578 break;
4579 case 0x2e: /* fnsave mem */
4580 gen_op_fnsave_A0(s->dflag);
4581 break;
4582 case 0x2f: /* fnstsw mem */
4583 gen_op_fnstsw_A0();
4584 break;
4585 case 0x3c: /* fbld */
4586 gen_op_fbld_ST0_A0();
4587 break;
4588 case 0x3e: /* fbstp */
4589 gen_op_fbst_ST0_A0();
4590 gen_op_fpop();
4591 break;
4592 case 0x3d: /* fildll */
4593 gen_op_fildll_ST0_A0();
4594 break;
4595 case 0x3f: /* fistpll */
4596 gen_op_fistll_ST0_A0();
4597 gen_op_fpop();
4598 break;
4599 default:
4600 goto illegal_op;
4601 }
4602 } else {
4603 /* register float ops */
4604 opreg = rm;
4605
4606 switch(op) {
4607 case 0x08: /* fld sti */
4608 gen_op_fpush();
4609 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4610 break;
4611 case 0x09: /* fxchg sti */
4612 case 0x29: /* fxchg4 sti, undocumented op */
4613 case 0x39: /* fxchg7 sti, undocumented op */
4614 gen_op_fxchg_ST0_STN(opreg);
4615 break;
4616 case 0x0a: /* grp d9/2 */
4617 switch(rm) {
4618 case 0: /* fnop */
4619 /* check exceptions (FreeBSD FPU probe) */
4620 if (s->cc_op != CC_OP_DYNAMIC)
4621 gen_op_set_cc_op(s->cc_op);
4622 gen_jmp_im(pc_start - s->cs_base);
4623 gen_op_fwait();
4624 break;
4625 default:
4626 goto illegal_op;
4627 }
4628 break;
4629 case 0x0c: /* grp d9/4 */
4630 switch(rm) {
4631 case 0: /* fchs */
4632 gen_op_fchs_ST0();
4633 break;
4634 case 1: /* fabs */
4635 gen_op_fabs_ST0();
4636 break;
4637 case 4: /* ftst */
4638 gen_op_fldz_FT0();
4639 gen_op_fcom_ST0_FT0();
4640 break;
4641 case 5: /* fxam */
4642 gen_op_fxam_ST0();
4643 break;
4644 default:
4645 goto illegal_op;
4646 }
4647 break;
4648 case 0x0d: /* grp d9/5 */
4649 {
4650 switch(rm) {
4651 case 0:
4652 gen_op_fpush();
4653 gen_op_fld1_ST0();
4654 break;
4655 case 1:
4656 gen_op_fpush();
4657 gen_op_fldl2t_ST0();
4658 break;
4659 case 2:
4660 gen_op_fpush();
4661 gen_op_fldl2e_ST0();
4662 break;
4663 case 3:
4664 gen_op_fpush();
4665 gen_op_fldpi_ST0();
4666 break;
4667 case 4:
4668 gen_op_fpush();
4669 gen_op_fldlg2_ST0();
4670 break;
4671 case 5:
4672 gen_op_fpush();
4673 gen_op_fldln2_ST0();
4674 break;
4675 case 6:
4676 gen_op_fpush();
4677 gen_op_fldz_ST0();
4678 break;
4679 default:
4680 goto illegal_op;
4681 }
4682 }
4683 break;
4684 case 0x0e: /* grp d9/6 */
4685 switch(rm) {
4686 case 0: /* f2xm1 */
4687 gen_op_f2xm1();
4688 break;
4689 case 1: /* fyl2x */
4690 gen_op_fyl2x();
4691 break;
4692 case 2: /* fptan */
4693 gen_op_fptan();
4694 break;
4695 case 3: /* fpatan */
4696 gen_op_fpatan();
4697 break;
4698 case 4: /* fxtract */
4699 gen_op_fxtract();
4700 break;
4701 case 5: /* fprem1 */
4702 gen_op_fprem1();
4703 break;
4704 case 6: /* fdecstp */
4705 gen_op_fdecstp();
4706 break;
4707 default:
4708 case 7: /* fincstp */
4709 gen_op_fincstp();
4710 break;
4711 }
4712 break;
4713 case 0x0f: /* grp d9/7 */
4714 switch(rm) {
4715 case 0: /* fprem */
4716 gen_op_fprem();
4717 break;
4718 case 1: /* fyl2xp1 */
4719 gen_op_fyl2xp1();
4720 break;
4721 case 2: /* fsqrt */
4722 gen_op_fsqrt();
4723 break;
4724 case 3: /* fsincos */
4725 gen_op_fsincos();
4726 break;
4727 case 5: /* fscale */
4728 gen_op_fscale();
4729 break;
4730 case 4: /* frndint */
4731 gen_op_frndint();
4732 break;
4733 case 6: /* fsin */
4734 gen_op_fsin();
4735 break;
4736 default:
4737 case 7: /* fcos */
4738 gen_op_fcos();
4739 break;
4740 }
4741 break;
4742 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4743 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4744 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4745 {
4746 int op1;
4747
4748 op1 = op & 7;
4749 if (op >= 0x20) {
4750 gen_op_fp_arith_STN_ST0[op1](opreg);
4751 if (op >= 0x30)
4752 gen_op_fpop();
4753 } else {
4754 gen_op_fmov_FT0_STN(opreg);
4755 gen_op_fp_arith_ST0_FT0[op1]();
4756 }
4757 }
4758 break;
4759 case 0x02: /* fcom */
4760 case 0x22: /* fcom2, undocumented op */
4761 gen_op_fmov_FT0_STN(opreg);
4762 gen_op_fcom_ST0_FT0();
4763 break;
4764 case 0x03: /* fcomp */
4765 case 0x23: /* fcomp3, undocumented op */
4766 case 0x32: /* fcomp5, undocumented op */
4767 gen_op_fmov_FT0_STN(opreg);
4768 gen_op_fcom_ST0_FT0();
4769 gen_op_fpop();
4770 break;
4771 case 0x15: /* da/5 */
4772 switch(rm) {
4773 case 1: /* fucompp */
4774 gen_op_fmov_FT0_STN(1);
4775 gen_op_fucom_ST0_FT0();
4776 gen_op_fpop();
4777 gen_op_fpop();
4778 break;
4779 default:
4780 goto illegal_op;
4781 }
4782 break;
4783 case 0x1c:
4784 switch(rm) {
4785 case 0: /* feni (287 only, just do nop here) */
4786 break;
4787 case 1: /* fdisi (287 only, just do nop here) */
4788 break;
4789 case 2: /* fclex */
4790 gen_op_fclex();
4791 break;
4792 case 3: /* fninit */
4793 gen_op_fninit();
4794 break;
4795 case 4: /* fsetpm (287 only, just do nop here) */
4796 break;
4797 default:
4798 goto illegal_op;
4799 }
4800 break;
4801 case 0x1d: /* fucomi */
4802 if (s->cc_op != CC_OP_DYNAMIC)
4803 gen_op_set_cc_op(s->cc_op);
4804 gen_op_fmov_FT0_STN(opreg);
4805 gen_op_fucomi_ST0_FT0();
4806 s->cc_op = CC_OP_EFLAGS;
4807 break;
4808 case 0x1e: /* fcomi */
4809 if (s->cc_op != CC_OP_DYNAMIC)
4810 gen_op_set_cc_op(s->cc_op);
4811 gen_op_fmov_FT0_STN(opreg);
4812 gen_op_fcomi_ST0_FT0();
4813 s->cc_op = CC_OP_EFLAGS;
4814 break;
4815 case 0x28: /* ffree sti */
4816 gen_op_ffree_STN(opreg);
4817 break;
4818 case 0x2a: /* fst sti */
4819 gen_op_fmov_STN_ST0(opreg);
4820 break;
4821 case 0x2b: /* fstp sti */
4822 case 0x0b: /* fstp1 sti, undocumented op */
4823 case 0x3a: /* fstp8 sti, undocumented op */
4824 case 0x3b: /* fstp9 sti, undocumented op */
4825 gen_op_fmov_STN_ST0(opreg);
4826 gen_op_fpop();
4827 break;
4828 case 0x2c: /* fucom st(i) */
4829 gen_op_fmov_FT0_STN(opreg);
4830 gen_op_fucom_ST0_FT0();
4831 break;
4832 case 0x2d: /* fucomp st(i) */
4833 gen_op_fmov_FT0_STN(opreg);
4834 gen_op_fucom_ST0_FT0();
4835 gen_op_fpop();
4836 break;
4837 case 0x33: /* de/3 */
4838 switch(rm) {
4839 case 1: /* fcompp */
4840 gen_op_fmov_FT0_STN(1);
4841 gen_op_fcom_ST0_FT0();
4842 gen_op_fpop();
4843 gen_op_fpop();
4844 break;
4845 default:
4846 goto illegal_op;
4847 }
4848 break;
4849 case 0x38: /* ffreep sti, undocumented op */
4850 gen_op_ffree_STN(opreg);
4851 gen_op_fpop();
4852 break;
4853 case 0x3c: /* df/4 */
4854 switch(rm) {
4855 case 0:
4856 gen_op_fnstsw_EAX();
4857 break;
4858 default:
4859 goto illegal_op;
4860 }
4861 break;
4862 case 0x3d: /* fucomip */
4863 if (s->cc_op != CC_OP_DYNAMIC)
4864 gen_op_set_cc_op(s->cc_op);
4865 gen_op_fmov_FT0_STN(opreg);
4866 gen_op_fucomi_ST0_FT0();
4867 gen_op_fpop();
4868 s->cc_op = CC_OP_EFLAGS;
4869 break;
4870 case 0x3e: /* fcomip */
4871 if (s->cc_op != CC_OP_DYNAMIC)
4872 gen_op_set_cc_op(s->cc_op);
4873 gen_op_fmov_FT0_STN(opreg);
4874 gen_op_fcomi_ST0_FT0();
4875 gen_op_fpop();
4876 s->cc_op = CC_OP_EFLAGS;
4877 break;
4878 case 0x10 ... 0x13: /* fcmovxx */
4879 case 0x18 ... 0x1b:
4880 {
4881 int op1;
4882 const static uint8_t fcmov_cc[8] = {
4883 (JCC_B << 1),
4884 (JCC_Z << 1),
4885 (JCC_BE << 1),
4886 (JCC_P << 1),
4887 };
4888 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4889 gen_setcc(s, op1);
4890 gen_op_fcmov_ST0_STN_T0(opreg);
4891 }
4892 break;
4893 default:
4894 goto illegal_op;
4895 }
4896 }
4897 break;
4898 /************************/
4899 /* string ops */
4900
/* For every string op below: opcode bit 0 clear selects byte size,
   otherwise operand size follows dflag.  With a REP/REPNE prefix the
   looping gen_repz_* helper is emitted instead of one iteration. */
4901 case 0xa4: /* movsS */
4902 case 0xa5:
4903 if ((b & 1) == 0)
4904 ot = OT_BYTE;
4905 else
4906 ot = dflag + OT_WORD;
4907
4908 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4909 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4910 } else {
4911 gen_movs(s, ot);
4912 }
4913 break;
4914
4915 case 0xaa: /* stosS */
4916 case 0xab:
4917 if ((b & 1) == 0)
4918 ot = OT_BYTE;
4919 else
4920 ot = dflag + OT_WORD;
4921
4922 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4923 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4924 } else {
4925 gen_stos(s, ot);
4926 }
4927 break;
4928 case 0xac: /* lodsS */
4929 case 0xad:
4930 if ((b & 1) == 0)
4931 ot = OT_BYTE;
4932 else
4933 ot = dflag + OT_WORD;
4934 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4935 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4936 } else {
4937 gen_lods(s, ot);
4938 }
4939 break;
/* scas/cmps distinguish REPNE (last arg 1) from REPE (last arg 0);
   in the non-rep path the lazy cc state records a CMP of size ot.
   Presumably the gen_repz_* helpers handle flags internally — they
   are not visible here. */
4940 case 0xae: /* scasS */
4941 case 0xaf:
4942 if ((b & 1) == 0)
4943 ot = OT_BYTE;
4944 else
4945 ot = dflag + OT_WORD;
4946 if (prefixes & PREFIX_REPNZ) {
4947 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4948 } else if (prefixes & PREFIX_REPZ) {
4949 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4950 } else {
4951 gen_scas(s, ot);
4952 s->cc_op = CC_OP_SUBB + ot;
4953 }
4954 break;
4955
4956 case 0xa6: /* cmpsS */
4957 case 0xa7:
4958 if ((b & 1) == 0)
4959 ot = OT_BYTE;
4960 else
4961 ot = dflag + OT_WORD;
4962 if (prefixes & PREFIX_REPNZ) {
4963 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4964 } else if (prefixes & PREFIX_REPZ) {
4965 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4966 } else {
4967 gen_cmps(s, ot);
4968 s->cc_op = CC_OP_SUBB + ot;
4969 }
4970 break;
/* ins/outs: port number comes from DX (masked to 16 bits);
   gen_check_io emits the IOPL/TSS-bitmap permission check and
   gen_svm_check_io may end translation of this insn (break). */
4971 case 0x6c: /* insS */
4972 case 0x6d:
4973 if ((b & 1) == 0)
4974 ot = OT_BYTE;
4975 else
4976 ot = dflag ? OT_LONG : OT_WORD;
4977 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4978 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4979 gen_op_andl_T0_ffff();
4980 if (gen_svm_check_io(s, pc_start,
4981 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4982 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4983 break;
4984 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4985 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4986 } else {
4987 gen_ins(s, ot);
4988 }
4989 break;
4990 case 0x6e: /* outsS */
4991 case 0x6f:
4992 if ((b & 1) == 0)
4993 ot = OT_BYTE;
4994 else
4995 ot = dflag ? OT_LONG : OT_WORD;
4996 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4997 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4998 gen_op_andl_T0_ffff();
4999 if (gen_svm_check_io(s, pc_start,
5000 (1 << (4+ot)) | svm_is_rep(prefixes) |
5001 4 | (1 << (7+s->aflag))))
5002 break;
5003 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5004 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5005 } else {
5006 gen_outs(s, ot);
5007 }
5008 break;
5009
5010 /************************/
5011 /* port I/O */
5012
/* e4/e5: in AL/eAX, imm8 — port number is an immediate byte. */
5013 case 0xe4:
5014 case 0xe5:
5015 if ((b & 1) == 0)
5016 ot = OT_BYTE;
5017 else
5018 ot = dflag ? OT_LONG : OT_WORD;
5019 val = ldub_code(s->pc++);
5020 gen_op_movl_T0_im(val);
5021 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5022 if (gen_svm_check_io(s, pc_start,
5023 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5024 (1 << (4+ot))))
5025 break;
5026 gen_op_in[ot]();
5027 gen_op_mov_reg_T1(ot, R_EAX);
5028 break;
/* e6/e7: out imm8, AL/eAX. */
5029 case 0xe6:
5030 case 0xe7:
5031 if ((b & 1) == 0)
5032 ot = OT_BYTE;
5033 else
5034 ot = dflag ? OT_LONG : OT_WORD;
5035 val = ldub_code(s->pc++);
5036 gen_op_movl_T0_im(val);
5037 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5038 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5039 (1 << (4+ot))))
5040 break;
5041 gen_op_mov_TN_reg(ot, 1, R_EAX);
5042 gen_op_out[ot]();
5043 break;
/* ec/ed: in AL/eAX, DX — port number taken from DX (16 bits). */
5044 case 0xec:
5045 case 0xed:
5046 if ((b & 1) == 0)
5047 ot = OT_BYTE;
5048 else
5049 ot = dflag ? OT_LONG : OT_WORD;
5050 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5051 gen_op_andl_T0_ffff();
5052 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5053 if (gen_svm_check_io(s, pc_start,
5054 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5055 (1 << (4+ot))))
5056 break;
5057 gen_op_in[ot]();
5058 gen_op_mov_reg_T1(ot, R_EAX);
5059 break;
/* ee/ef: out DX, AL/eAX. */
5060 case 0xee:
5061 case 0xef:
5062 if ((b & 1) == 0)
5063 ot = OT_BYTE;
5064 else
5065 ot = dflag ? OT_LONG : OT_WORD;
5066 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5067 gen_op_andl_T0_ffff();
5068 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5069 if (gen_svm_check_io(s, pc_start,
5070 svm_is_rep(prefixes) | (1 << (4+ot))))
5071 break;
5072 gen_op_mov_TN_reg(ot, 1, R_EAX);
5073 gen_op_out[ot]();
5074 break;
5075
5076 /************************/
5077 /* control */
/* c2: near ret with stack-adjust immediate.  In 64-bit mode a near
   ret uses a 64-bit operand unless a prefix forced 16-bit, hence the
   dflag promotion below before computing the stack displacement. */
5078 case 0xc2: /* ret im */
5079 val = ldsw_code(s->pc);
5080 s->pc += 2;
5081 gen_pop_T0(s);
5082 if (CODE64(s) && s->dflag)
5083 s->dflag = 2; /* promote to 64-bit operand size */
5084 gen_stack_update(s, val + (2 << s->dflag));
5085 if (s->dflag == 0)
5086 gen_op_andl_T0_ffff(); /* 16-bit return address */
5087 gen_op_jmp_T0();
5088 gen_eob(s); /* indirect jump: end translation block */
5089 break;
5090 case 0xc3: /* ret */
5091 gen_pop_T0(s);
5092 gen_pop_update(s);
5093 if (s->dflag == 0)
5094 gen_op_andl_T0_ffff();
5095 gen_op_jmp_T0();
5096 gen_eob(s);
5097 break;
/* ca/cb: far return.  Protected mode defers to the lret_protected
   helper; real/vm86 mode pops offset and selector inline. */
5098 case 0xca: /* lret im */
5099 val = ldsw_code(s->pc);
5100 s->pc += 2;
5101 do_lret:
5102 if (s->pe && !s->vm86) {
5103 if (s->cc_op != CC_OP_DYNAMIC)
5104 gen_op_set_cc_op(s->cc_op);
5105 gen_jmp_im(pc_start - s->cs_base);
5106 gen_op_lret_protected(s->dflag, val);
5107 } else {
5108 gen_stack_A0(s);
5109 /* pop offset */
5110 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5111 if (s->dflag == 0)
5112 gen_op_andl_T0_ffff();
5113 /* NOTE: keeping EIP updated is not a problem in case of
5114 exception */
5115 gen_op_jmp_T0();
5116 /* pop selector */
5117 gen_op_addl_A0_im(2 << s->dflag);
5118 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5119 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5120 /* add stack offset */
5121 gen_stack_update(s, val + (4 << s->dflag));
5122 }
5123 gen_eob(s);
5124 break;
5125 case 0xcb: /* lret */
5126 val = 0;
5127 goto do_lret;
/* cf: iret.  Three paths: real mode, vm86 (needs IOPL 3 else #GP),
   and protected mode via the iret_protected helper.  All leave
   EFLAGS freshly loaded, hence CC_OP_EFLAGS. */
5128 case 0xcf: /* iret */
5129 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5130 break;
5131 if (!s->pe) {
5132 /* real mode */
5133 gen_op_iret_real(s->dflag);
5134 s->cc_op = CC_OP_EFLAGS;
5135 } else if (s->vm86) {
5136 if (s->iopl != 3) {
5137 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5138 } else {
5139 gen_op_iret_real(s->dflag);
5140 s->cc_op = CC_OP_EFLAGS;
5141 }
5142 } else {
5143 if (s->cc_op != CC_OP_DYNAMIC)
5144 gen_op_set_cc_op(s->cc_op);
5145 gen_jmp_im(pc_start - s->cs_base);
5146 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5147 s->cc_op = CC_OP_EFLAGS;
5148 }
5149 gen_eob(s);
5150 break;
/* Relative call/jmp/jcc: target = next_eip + sign-extended
   displacement, truncated to 16 bits when operand size is 16. */
5151 case 0xe8: /* call im */
5152 {
5153 if (dflag)
5154 tval = (int32_t)insn_get(s, OT_LONG);
5155 else
5156 tval = (int16_t)insn_get(s, OT_WORD);
5157 next_eip = s->pc - s->cs_base;
5158 tval += next_eip;
5159 if (s->dflag == 0)
5160 tval &= 0xffff;
5161 gen_movtl_T0_im(next_eip);
5162 gen_push_T0(s); /* push return address */
5163 gen_jmp(s, tval);
5164 }
5165 break;
/* 9a: far call with immediate selector:offset; invalid in 64-bit
   mode.  Falls through to the shared do_lcall handling (not in
   this view). */
5166 case 0x9a: /* lcall im */
5167 {
5168 unsigned int selector, offset;
5169
5170 if (CODE64(s))
5171 goto illegal_op;
5172 ot = dflag ? OT_LONG : OT_WORD;
5173 offset = insn_get(s, ot);
5174 selector = insn_get(s, OT_WORD);
5175
5176 gen_op_movl_T0_im(selector);
5177 gen_op_movl_T1_imu(offset);
5178 }
5179 goto do_lcall;
5180 case 0xe9: /* jmp im */
5181 if (dflag)
5182 tval = (int32_t)insn_get(s, OT_LONG);
5183 else
5184 tval = (int16_t)insn_get(s, OT_WORD);
5185 tval += s->pc - s->cs_base;
5186 if (s->dflag == 0)
5187 tval &= 0xffff;
5188 gen_jmp(s, tval);
5189 break;
5190 case 0xea: /* ljmp im */
5191 {
5192 unsigned int selector, offset;
5193
5194 if (CODE64(s))
5195 goto illegal_op;
5196 ot = dflag ? OT_LONG : OT_WORD;
5197 offset = insn_get(s, ot);
5198 selector = insn_get(s, OT_WORD);
5199
5200 gen_op_movl_T0_im(selector);
5201 gen_op_movl_T1_imu(offset);
5202 }
5203 goto do_ljmp;
5204 case 0xeb: /* jmp Jb */
5205 tval = (int8_t)insn_get(s, OT_BYTE);
5206 tval += s->pc - s->cs_base;
5207 if (s->dflag == 0)
5208 tval &= 0xffff;
5209 gen_jmp(s, tval);
5210 break;
/* Conditional jumps: short (Jb) and near (Jv) forms share do_jcc;
   the condition index is the low nibble of the opcode b. */
5211 case 0x70 ... 0x7f: /* jcc Jb */
5212 tval = (int8_t)insn_get(s, OT_BYTE);
5213 goto do_jcc;
5214 case 0x180 ... 0x18f: /* jcc Jv */
5215 if (dflag) {
5216 tval = (int32_t)insn_get(s, OT_LONG);
5217 } else {
5218 tval = (int16_t)insn_get(s, OT_WORD);
5219 }
5220 do_jcc:
5221 next_eip = s->pc - s->cs_base;
5222 tval += next_eip;
5223 if (s->dflag == 0)
5224 tval &= 0xffff;
5225 gen_jcc(s, b, tval, next_eip);
5226 break;
5227
/* setcc: evaluate condition into T0, then store the byte result. */
5228 case 0x190 ... 0x19f: /* setcc Gv */
5229 modrm = ldub_code(s->pc++);
5230 gen_setcc(s, b);
5231 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5232 break;
/* cmov: condition is evaluated into T0 first; the source operand is
   loaded into T1 (memory or register), then the conditional move op
   for this size/destination is emitted. */
5233 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5234 ot = dflag + OT_WORD;
5235 modrm = ldub_code(s->pc++);
5236 reg = ((modrm >> 3) & 7) | rex_r;
5237 mod = (modrm >> 6) & 3;
5238 gen_setcc(s, b);
5239 if (mod != 3) {
5240 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5241 gen_op_ld_T1_A0(ot + s->mem_index);
5242 } else {
5243 rm = (modrm & 7) | REX_B(s);
5244 gen_op_mov_TN_reg(ot, 1, rm);
5245 }
5246 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5247 break;
5248
5249 /************************/
5250 /* flags */
/* pushf/popf: in vm86 mode with IOPL != 3 these fault with #GP.
   popf's effect on EFLAGS depends on privilege: CPL 0 writes all
   bits, CPL <= IOPL also writes IF, otherwise the restricted form. */
5251 case 0x9c: /* pushf */
5252 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5253 break;
5254 if (s->vm86 && s->iopl != 3) {
5255 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5256 } else {
5257 if (s->cc_op != CC_OP_DYNAMIC)
5258 gen_op_set_cc_op(s->cc_op);
5259 gen_op_movl_T0_eflags();
5260 gen_push_T0(s);
5261 }
5262 break;
5263 case 0x9d: /* popf */
5264 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5265 break;
5266 if (s->vm86 && s->iopl != 3) {
5267 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5268 } else {
5269 gen_pop_T0(s);
5270 if (s->cpl == 0) {
5271 if (s->dflag) {
5272 gen_op_movl_eflags_T0_cpl0();
5273 } else {
5274 gen_op_movw_eflags_T0_cpl0();
5275 }
5276 } else {
5277 if (s->cpl <= s->iopl) {
5278 if (s->dflag) {
5279 gen_op_movl_eflags_T0_io();
5280 } else {
5281 gen_op_movw_eflags_T0_io();
5282 }
5283 } else {
5284 if (s->dflag) {
5285 gen_op_movl_eflags_T0();
5286 } else {
5287 gen_op_movw_eflags_T0();
5288 }
5289 }
5290 }
5291 gen_pop_update(s);
5292 s->cc_op = CC_OP_EFLAGS;
5293 /* abort translation because TF flag may change */
5294 gen_jmp_im(s->pc - s->cs_base);
5295 gen_eob(s);
5296 }
5297 break;
/* sahf/lahf are invalid in 64-bit mode in this implementation. */
5298 case 0x9e: /* sahf */
5299 if (CODE64(s))
5300 goto illegal_op;
5301 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5302 if (s->cc_op != CC_OP_DYNAMIC)
5303 gen_op_set_cc_op(s->cc_op);
5304 gen_op_movb_eflags_T0();
5305 s->cc_op = CC_OP_EFLAGS;
5306 break;
5307 case 0x9f: /* lahf */
5308 if (CODE64(s))
5309 goto illegal_op;
5310 if (s->cc_op != CC_OP_DYNAMIC)
5311 gen_op_set_cc_op(s->cc_op);
5312 gen_op_movl_T0_eflags();
5313 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5314 break;
/* cmc/clc/stc modify CF, so the lazy cc state is materialized into
   EFLAGS first; cld/std only touch DF and need no flush. */
5315 case 0xf5: /* cmc */
5316 if (s->cc_op != CC_OP_DYNAMIC)
5317 gen_op_set_cc_op(s->cc_op);
5318 gen_op_cmc();
5319 s->cc_op = CC_OP_EFLAGS;
5320 break;
5321 case 0xf8: /* clc */
5322 if (s->cc_op != CC_OP_DYNAMIC)
5323 gen_op_set_cc_op(s->cc_op);
5324 gen_op_clc();
5325 s->cc_op = CC_OP_EFLAGS;
5326 break;
5327 case 0xf9: /* stc */
5328 if (s->cc_op != CC_OP_DYNAMIC)
5329 gen_op_set_cc_op(s->cc_op);
5330 gen_op_stc();
5331 s->cc_op = CC_OP_EFLAGS;
5332 break;
5333 case 0xfc: /* cld */
5334 gen_op_cld();
5335 break;
5336 case 0xfd: /* std */
5337 gen_op_std();
5338 break;
5339
5340 /************************/
5341 /* bit operations */
/* 0f ba /4../7: bt/bts/btr/btc with an immediate bit index.
   /0../3 are invalid (op < 4 check below).  For the writing forms
   the result is stored back and the cc helper updated. */
5342 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5343 ot = dflag + OT_WORD;
5344 modrm = ldub_code(s->pc++);
5345 op = (modrm >> 3) & 7;
5346 mod = (modrm >> 6) & 3;
5347 rm = (modrm & 7) | REX_B(s);
5348 if (mod != 3) {
5349 s->rip_offset = 1; /* immediate byte follows the modrm operand */
5350 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5351 gen_op_ld_T0_A0(ot + s->mem_index);
5352 } else {
5353 gen_op_mov_TN_reg(ot, 0, rm);
5354 }
5355 /* load shift */
5356 val = ldub_code(s->pc++);
5357 gen_op_movl_T1_im(val);
5358 if (op < 4)
5359 goto illegal_op;
5360 op -= 4; /* normalize to 0=bt 1=bts 2=btr 3=btc */
5361 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5362 s->cc_op = CC_OP_SARB + ot;
5363 if (op != 0) {
5364 if (mod != 3)
5365 gen_op_st_T0_A0(ot + s->mem_index);
5366 else
5367 gen_op_mov_reg_T0(ot, rm);
5368 gen_op_update_bt_cc();
5369 }
5370 break;
/* Register-indexed bit ops funnel into do_btx with op selecting
   the variant, same encoding as above after the -4 normalization. */
5371 case 0x1a3: /* bt Gv, Ev */
5372 op = 0;
5373 goto do_btx;
5374 case 0x1ab: /* bts */
5375 op = 1;
5376 goto do_btx;
5377 case 0x1b3: /* btr */
5378 op = 2;
5379 goto do_btx;
5380 case 0x1bb: /* btc */
5381 op = 3;
5382 do_btx:
5383 ot = dflag + OT_WORD;
5384 modrm = ldub_code(s->pc++);
5385 reg = ((modrm >> 3) & 7) | rex_r;
5386 mod = (modrm >> 6) & 3;
5387 rm = (modrm & 7) | REX_B(s);
5388 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5389 if (mod != 3) {
5390 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5391 /* specific case: we need to add a displacement */
5392 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5393 gen_op_ld_T0_A0(ot + s->mem_index);
5394 } else {
5395 gen_op_mov_TN_reg(ot, 0, rm);
5396 }
5397 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5398 s->cc_op = CC_OP_SARB + ot;
5399 if (op != 0) {
5400 if (mod != 3)
5401 gen_op_st_T0_A0(ot + s->mem_index);
5402 else
5403 gen_op_mov_reg_T0(ot, rm);
5404 gen_op_update_bt_cc();
5405 }
5406 break;
5407 case 0x1bc: /* bsf */
5408 case 0x1bd: /* bsr */
5409 ot = dflag + OT_WORD;
5410 modrm = ldub_code(s->pc++);
5411 reg = ((modrm >> 3) & 7) | rex_r;
5412 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5413 /* NOTE: in order to handle the 0 case, we must load the
5414 result. It could be optimized with a generated jump */
5415 gen_op_mov_TN_reg(ot, 1, reg);
5416 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5417 gen_op_mov_reg_T1(ot, reg);
5418 s->cc_op = CC_OP_LOGICB + ot;
5419 break;
5420 /************************/
5421 /* bcd */
/* All BCD adjustment instructions are invalid in 64-bit mode.
   They read and write EFLAGS, so the lazy cc state is flushed
   before the helper runs. */
5422 case 0x27: /* daa */
5423 if (CODE64(s))
5424 goto illegal_op;
5425 if (s->cc_op != CC_OP_DYNAMIC)
5426 gen_op_set_cc_op(s->cc_op);
5427 gen_op_daa();
5428 s->cc_op = CC_OP_EFLAGS;
5429 break;
5430 case 0x2f: /* das */
5431 if (CODE64(s))
5432 goto illegal_op;
5433 if (s->cc_op != CC_OP_DYNAMIC)
5434 gen_op_set_cc_op(s->cc_op);
5435 gen_op_das();
5436 s->cc_op = CC_OP_EFLAGS;
5437 break;
5438 case 0x37: /* aaa */
5439 if (CODE64(s))
5440 goto illegal_op;
5441 if (s->cc_op != CC_OP_DYNAMIC)
5442 gen_op_set_cc_op(s->cc_op);
5443 gen_op_aaa();
5444 s->cc_op = CC_OP_EFLAGS;
5445 break;
5446 case 0x3f: /* aas */
5447 if (CODE64(s))
5448 goto illegal_op;
5449 if (s->cc_op != CC_OP_DYNAMIC)
5450 gen_op_set_cc_op(s->cc_op);
5451 gen_op_aas();
5452 s->cc_op = CC_OP_EFLAGS;
5453 break;
/* aam divides AL by the immediate base; base 0 raises #DE. */
5454 case 0xd4: /* aam */
5455 if (CODE64(s))
5456 goto illegal_op;
5457 val = ldub_code(s->pc++);
5458 if (val == 0) {
5459 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5460 } else {
5461 gen_op_aam(val);
5462 s->cc_op = CC_OP_LOGICB;
5463 }
5464 break;
5465 case 0xd5: /* aad */
5466 if (CODE64(s))
5467 goto illegal_op;
5468 val = ldub_code(s->pc++);
5469 gen_op_aad(val);
5470 s->cc_op = CC_OP_LOGICB;
5471 break;
5472 /************************/
5473 /* misc */
/* 90: nop; with a REPZ prefix this is PAUSE, which is an SVM
   intercept point. */
5474 case 0x90: /* nop */
5475 /* XXX: xchg + rex handling */
5476 /* XXX: correct lock test for all insn */
5477 if (prefixes & PREFIX_LOCK)
5478 goto illegal_op;
5479 if (prefixes & PREFIX_REPZ) {
5480 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5481 }
5482 break;
/* fwait: raises #NM when both MP and TS are set in CR0 flags. */
5483 case 0x9b: /* fwait */
5484 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5485 (HF_MP_MASK | HF_TS_MASK)) {
5486 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5487 } else {
5488 if (s->cc_op != CC_OP_DYNAMIC)
5489 gen_op_set_cc_op(s->cc_op);
5490 gen_jmp_im(pc_start - s->cs_base);
5491 gen_op_fwait();
5492 }
5493 break;
5494 case 0xcc: /* int3 */
5495 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5496 break;
5497 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5498 break;
/* cd: int N — in vm86 mode with IOPL != 3 this faults with #GP. */
5499 case 0xcd: /* int N */
5500 val = ldub_code(s->pc++);
5501 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5502 break;
5503 if (s->vm86 && s->iopl != 3) {
5504 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5505 } else {
5506 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5507 }
5508 break;
5509 case 0xce: /* into */
5510 if (CODE64(s))
5511 goto illegal_op;
5512 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5513 break;
5514 if (s->cc_op != CC_OP_DYNAMIC)
5515 gen_op_set_cc_op(s->cc_op);
5516 gen_jmp_im(pc_start - s->cs_base);
5517 gen_op_into(s->pc - pc_start);
5518 break;
5519 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5520 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5521 break;
5522 #if 1
5523 gen_debug(s, pc_start - s->cs_base);
5524 #else
5525 /* start debug */
5526 tb_flush(cpu_single_env);
5527 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5528 #endif
5529 break;
/* cli/sti: permitted when CPL <= IOPL (protected) or IOPL == 3
   (vm86); otherwise #GP. */
5530 case 0xfa: /* cli */
5531 if (!s->vm86) {
5532 if (s->cpl <= s->iopl) {
5533 gen_op_cli();
5534 } else {
5535 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5536 }
5537 } else {
5538 if (s->iopl == 3) {
5539 gen_op_cli();
5540 } else {
5541 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5542 }
5543 }
5544 break;
5545 case 0xfb: /* sti */
5546 if (!s->vm86) {
5547 if (s->cpl <= s->iopl) {
5548 gen_sti:
5549 gen_op_sti();
5550 /* interruptions are enabled only the first insn after sti */
5551 /* If several instructions disable interrupts, only the
5552 _first_ does it */
5553 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5554 gen_op_set_inhibit_irq();
5555 /* give a chance to handle pending irqs */
5556 gen_jmp_im(s->pc - s->cs_base);
5557 gen_eob(s);
5558 } else {
5559 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5560 }
5561 } else {
5562 if (s->iopl == 3) {
5563 goto gen_sti;
5564 } else {
5565 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5566 }
5567 }
5568 break;
/* bound: memory operand only; jmp_im first so a #BR exception
   reports the correct EIP. */
5569 case 0x62: /* bound */
5570 if (CODE64(s))
5571 goto illegal_op;
5572 ot = dflag ? OT_LONG : OT_WORD;
5573 modrm = ldub_code(s->pc++);
5574 reg = (modrm >> 3) & 7;
5575 mod = (modrm >> 6) & 3;
5576 if (mod == 3)
5577 goto illegal_op;
5578 gen_op_mov_TN_reg(ot, 0, reg);
5579 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5580 gen_jmp_im(pc_start - s->cs_base);
5581 if (ot == OT_WORD)
5582 gen_op_boundw();
5583 else
5584 gen_op_boundl();
5585 break;
/* bswap: 64-bit form swaps the whole quad; the 32-bit path
   truncates to i32, swaps, and zero-extends back. */
5586 case 0x1c8 ... 0x1cf: /* bswap reg */
5587 reg = (b & 7) | REX_B(s);
5588 #ifdef TARGET_X86_64
5589 if (dflag == 2) {
5590 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5591 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5592 gen_op_mov_reg_T0(OT_QUAD, reg);
5593 } else
5594 {
5595 TCGv tmp0;
5596 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5597
5598 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5599 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5600 tcg_gen_bswap_i32(tmp0, tmp0);
5601 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5602 gen_op_mov_reg_T0(OT_LONG, reg);
5603 }
5604 #else
5605 {
5606 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5607 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5608 gen_op_mov_reg_T0(OT_LONG, reg);
5609 }
5610 #endif
5611 break;
5612 case 0xd6: /* salc */
5613 if (CODE64(s))
5614 goto illegal_op;
5615 if (s->cc_op != CC_OP_DYNAMIC)
5616 gen_op_set_cc_op(s->cc_op);
5617 gen_op_salc();
5618 break;
/* loopnz/loopz need the current flags, so the cc state is flushed
   before falling through to the common loop/jecxz handling. */
5619 case 0xe0: /* loopnz */
5620 case 0xe1: /* loopz */
5621 if (s->cc_op != CC_OP_DYNAMIC)
5622 gen_op_set_cc_op(s->cc_op);
5623 /* FALL THRU */
5624 case 0xe2: /* loop */
5625 case 0xe3: /* jecxz */
5626 {
5627 int l1, l2;
5628
5629 tval = (int8_t)insn_get(s, OT_BYTE);
5630 next_eip = s->pc - s->cs_base;
5631 tval += next_eip;
5632 if (s->dflag == 0)
5633 tval &= 0xffff;
5634
/* l1 = branch taken (jump to tval), l2 = fall through past l1. */
5635 l1 = gen_new_label();
5636 l2 = gen_new_label();
5637 b &= 3;
5638 if (b == 3) {
5639 gen_op_jz_ecx[s->aflag](l1); /* jecxz: no ECX decrement */
5640 } else {
5641 gen_op_dec_ECX[s->aflag]();
5642 if (b <= 1)
5643 gen_op_mov_T0_cc(); /* loopnz/loopz also test ZF */
5644 gen_op_loop[s->aflag][b](l1);
5645 }
5646
5647 gen_jmp_im(next_eip);
5648 gen_op_jmp_label(l2);
5649 gen_set_label(l1);
5650 gen_jmp_im(tval);
5651 gen_set_label(l2);
5652 gen_eob(s);
5653 }
5654 break;
/* wrmsr/rdmsr: CPL 0 only.  The SVM intercept check returns nonzero
   when translation of the block must end afterwards (gen_eob). */
5655 case 0x130: /* wrmsr */
5656 case 0x132: /* rdmsr */
5657 if (s->cpl != 0) {
5658 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5659 } else {
5660 int retval = 0;
5661 if (b & 2) {
5662 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5663 gen_op_rdmsr();
5664 } else {
5665 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5666 gen_op_wrmsr();
5667 }
5668 if(retval)
5669 gen_eob(s);
5670 }
5671 break;
5672 case 0x131: /* rdtsc */
5673 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5674 break;
5675 gen_jmp_im(pc_start - s->cs_base);
5676 gen_op_rdtsc();
5677 break;
5678 case 0x133: /* rdpmc */
5679 gen_jmp_im(pc_start - s->cs_base);
5680 gen_op_rdpmc();
5681 break;
/* sysenter/sysexit: protected mode only, not available in 64-bit
   mode here; cc state is forced dynamic because the helper can
   change flags arbitrarily. */
5682 case 0x134: /* sysenter */
5683 if (CODE64(s))
5684 goto illegal_op;
5685 if (!s->pe) {
5686 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5687 } else {
5688 if (s->cc_op != CC_OP_DYNAMIC) {
5689 gen_op_set_cc_op(s->cc_op);
5690 s->cc_op = CC_OP_DYNAMIC;
5691 }
5692 gen_jmp_im(pc_start - s->cs_base);
5693 gen_op_sysenter();
5694 gen_eob(s);
5695 }
5696 break;
5697 case 0x135: /* sysexit */
5698 if (CODE64(s))
5699 goto illegal_op;
5700 if (!s->pe) {
5701 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5702 } else {
5703 if (s->cc_op != CC_OP_DYNAMIC) {
5704 gen_op_set_cc_op(s->cc_op);
5705 s->cc_op = CC_OP_DYNAMIC;
5706 }
5707 gen_jmp_im(pc_start - s->cs_base);
5708 gen_op_sysexit();
5709 gen_eob(s);
5710 }
5711 break;
5712 #ifdef TARGET_X86_64
5713 case 0x105: /* syscall */
5714 /* XXX: is it usable in real mode ? */
5715 if (s->cc_op != CC_OP_DYNAMIC) {
5716 gen_op_set_cc_op(s->cc_op);
5717 s->cc_op = CC_OP_DYNAMIC;
5718 }
5719 gen_jmp_im(pc_start - s->cs_base);
5720 gen_op_syscall(s->pc - pc_start);
5721 gen_eob(s);
5722 break;
5723 case 0x107: /* sysret */
5724 if (!s->pe) {
5725 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5726 } else {
5727 if (s->cc_op != CC_OP_DYNAMIC) {
5728 gen_op_set_cc_op(s->cc_op);
5729 s->cc_op = CC_OP_DYNAMIC;
5730 }
5731 gen_jmp_im(pc_start - s->cs_base);
5732 gen_op_sysret(s->dflag);
5733 /* condition codes are modified only in long mode */
5734 if (s->lma)
5735 s->cc_op = CC_OP_EFLAGS;
5736 gen_eob(s);
5737 }
5738 break;
5739 #endif
5740 case 0x1a2: /* cpuid */
5741 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5742 break;
5743 gen_op_cpuid();
5744 break;
/* hlt: CPL 0 only; EIP is advanced past the insn before halting
   so resumption continues at the next instruction. */
5745 case 0xf4: /* hlt */
5746 if (s->cpl != 0) {
5747 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5748 } else {
5749 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5750 break;
5751 if (s->cc_op != CC_OP_DYNAMIC)
5752 gen_op_set_cc_op(s->cc_op);
5753 gen_jmp_im(s->pc - s->cs_base);
5754 gen_op_hlt();
5755 s->is_jmp = 3;
5756 }
5757 break;
/* 0f 00 group: LDT/TR access instructions selected by the modrm
   reg field.  All require protected mode and are invalid in vm86;
   the load forms (lldt/ltr) additionally require CPL 0. */
5758 case 0x100:
5759 modrm = ldub_code(s->pc++);
5760 mod = (modrm >> 6) & 3;
5761 op = (modrm >> 3) & 7;
5762 switch(op) {
5763 case 0: /* sldt */
5764 if (!s->pe || s->vm86)
5765 goto illegal_op;
5766 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5767 break;
5768 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5769 ot = OT_WORD;
5770 if (mod == 3)
5771 ot += s->dflag; /* register destination may take full width */
5772 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5773 break;
5774 case 2: /* lldt */
5775 if (!s->pe || s->vm86)
5776 goto illegal_op;
5777 if (s->cpl != 0) {
5778 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5779 } else {
5780 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5781 break;
5782 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5783 gen_jmp_im(pc_start - s->cs_base);
5784 gen_op_lldt_T0();
5785 }
5786 break;
5787 case 1: /* str */
5788 if (!s->pe || s->vm86)
5789 goto illegal_op;
5790 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5791 break;
5792 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5793 ot = OT_WORD;
5794 if (mod == 3)
5795 ot += s->dflag;
5796 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5797 break;
5798 case 3: /* ltr */
5799 if (!s->pe || s->vm86)
5800 goto illegal_op;
5801 if (s->cpl != 0) {
5802 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5803 } else {
5804 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5805 break;
5806 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5807 gen_jmp_im(pc_start - s->cs_base);
5808 gen_op_ltr_T0();
5809 }
5810 break;
5811 case 4: /* verr */
5812 case 5: /* verw */
5813 if (!s->pe || s->vm86)
5814 goto illegal_op;
5815 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5816 if (s->cc_op != CC_OP_DYNAMIC)
5817 gen_op_set_cc_op(s->cc_op);
5818 if (op == 4)
5819 gen_op_verr();
5820 else
5821 gen_op_verw();
5822 s->cc_op = CC_OP_EFLAGS; /* verr/verw set ZF */
5823 break;
5824 default:
5825 goto illegal_op;
5826 }
5827 break;
/* 0f 01 group: descriptor-table and system-control instructions,
   selected by the modrm reg field.  Register-form encodings of some
   slots are reused for monitor/mwait (reg 1), the SVM instruction
   set (reg 2/3), and swapgs (reg 7, rm 0, 64-bit only). */
5828 case 0x101:
5829 modrm = ldub_code(s->pc++);
5830 mod = (modrm >> 6) & 3;
5831 op = (modrm >> 3) & 7;
5832 rm = modrm & 7;
5833 switch(op) {
5834 case 0: /* sgdt */
5835 if (mod == 3)
5836 goto illegal_op;
5837 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5838 break;
5839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5840 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5841 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5842 gen_add_A0_im(s, 2);
5843 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5844 if (!s->dflag)
5845 gen_op_andl_T0_im(0xffffff); /* 16-bit form stores 24-bit base */
5846 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5847 break;
5848 case 1:
5849 if (mod == 3) {
5850 switch (rm) {
/* monitor: linear address in DS:EBX/RBX plus AL offset;
   requires the MONITOR CPUID feature and CPL 0. */
5851 case 0: /* monitor */
5852 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5853 s->cpl != 0)
5854 goto illegal_op;
5855 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5856 break;
5857 gen_jmp_im(pc_start - s->cs_base);
5858 #ifdef TARGET_X86_64
5859 if (s->aflag == 2) {
5860 gen_op_movq_A0_reg(R_EBX);
5861 gen_op_addq_A0_AL();
5862 } else
5863 #endif
5864 {
5865 gen_op_movl_A0_reg(R_EBX);
5866 gen_op_addl_A0_AL();
5867 if (s->aflag == 0)
5868 gen_op_andl_A0_ffff();
5869 }
5870 gen_add_A0_ds_seg(s);
5871 gen_op_monitor();
5872 break;
5873 case 1: /* mwait */
5874 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5875 s->cpl != 0)
5876 goto illegal_op;
5877 if (s->cc_op != CC_OP_DYNAMIC) {
5878 gen_op_set_cc_op(s->cc_op);
5879 s->cc_op = CC_OP_DYNAMIC;
5880 }
5881 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5882 break;
5883 gen_jmp_im(s->pc - s->cs_base);
5884 gen_op_mwait();
5885 gen_eob(s);
5886 break;
5887 default:
5888 goto illegal_op;
5889 }
5890 } else { /* sidt */
5891 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5892 break;
5893 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5894 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5895 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5896 gen_add_A0_im(s, 2);
5897 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5898 if (!s->dflag)
5899 gen_op_andl_T0_im(0xffffff);
5900 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5901 }
5902 break;
/* lgdt/lidt memory form loads limit then base; the register-form
   encodings of these slots are the AMD SVM instructions. */
5903 case 2: /* lgdt */
5904 case 3: /* lidt */
5905 if (mod == 3) {
5906 switch(rm) {
5907 case 0: /* VMRUN */
5908 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5909 break;
5910 if (s->cc_op != CC_OP_DYNAMIC)
5911 gen_op_set_cc_op(s->cc_op);
5912 gen_jmp_im(s->pc - s->cs_base);
5913 gen_op_vmrun();
5914 s->cc_op = CC_OP_EFLAGS;
5915 gen_eob(s);
5916 break;
5917 case 1: /* VMMCALL */
5918 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5919 break;
5920 /* FIXME: cause #UD if hflags & SVM */
5921 gen_op_vmmcall();
5922 break;
5923 case 2: /* VMLOAD */
5924 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5925 break;
5926 gen_op_vmload();
5927 break;
5928 case 3: /* VMSAVE */
5929 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5930 break;
5931 gen_op_vmsave();
5932 break;
5933 case 4: /* STGI */
5934 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5935 break;
5936 gen_op_stgi();
5937 break;
5938 case 5: /* CLGI */
5939 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5940 break;
5941 gen_op_clgi();
5942 break;
5943 case 6: /* SKINIT */
5944 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5945 break;
5946 gen_op_skinit();
5947 break;
5948 case 7: /* INVLPGA */
5949 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5950 break;
5951 gen_op_invlpga();
5952 break;
5953 default:
5954 goto illegal_op;
5955 }
5956 } else if (s->cpl != 0) {
5957 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5958 } else {
5959 if (gen_svm_check_intercept(s, pc_start,
5960 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5961 break;
5962 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5963 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5964 gen_add_A0_im(s, 2);
5965 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5966 if (!s->dflag)
5967 gen_op_andl_T0_im(0xffffff);
5968 if (op == 2) {
5969 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5970 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5971 } else {
5972 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5973 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5974 }
5975 }
5976 break;
5977 case 4: /* smsw */
5978 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5979 break;
5980 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5981 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5982 break;
/* lmsw: CPL 0 only; ends the TB because CR0 changes can alter
   translation-relevant mode bits. */
5983 case 6: /* lmsw */
5984 if (s->cpl != 0) {
5985 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5986 } else {
5987 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5988 break;
5989 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5990 gen_op_lmsw_T0();
5991 gen_jmp_im(s->pc - s->cs_base);
5992 gen_eob(s);
5993 }
5994 break;
/* invlpg (memory form); register form with rm 0 in 64-bit mode is
   swapgs, which exchanges GS.base with kernelgsbase. */
5995 case 7: /* invlpg */
5996 if (s->cpl != 0) {
5997 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5998 } else {
5999 if (mod == 3) {
6000 #ifdef TARGET_X86_64
6001 if (CODE64(s) && rm == 0) {
6002 /* swapgs */
6003 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6004 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6005 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6006 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6007 } else
6008 #endif
6009 {
6010 goto illegal_op;
6011 }
6012 } else {
6013 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6014 break;
6015 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6016 gen_op_invlpg_A0();
6017 gen_jmp_im(s->pc - s->cs_base);
6018 gen_eob(s);
6019 }
6020 }
6021 break;
6022 default:
6023 goto illegal_op;
6024 }
6025 break;
/* invd/wbinvd: CPL 0 only; cache control is a no-op under
   emulation beyond the SVM intercept check. */
6026 case 0x108: /* invd */
6027 case 0x109: /* wbinvd */
6028 if (s->cpl != 0) {
6029 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6030 } else {
6031 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6032 break;
6033 /* nothing to do */
6034 }
6035 break;
/* Opcode 63 is arpl in legacy/compat mode but movsxd (sign-extending
   32->64 move) in 64-bit mode. */
6036 case 0x63: /* arpl or movslS (x86_64) */
6037 #ifdef TARGET_X86_64
6038 if (CODE64(s)) {
6039 int d_ot;
6040 /* d_ot is the size of destination */
6041 d_ot = dflag + OT_WORD;
6042
6043 modrm = ldub_code(s->pc++);
6044 reg = ((modrm >> 3) & 7) | rex_r;
6045 mod = (modrm >> 6) & 3;
6046 rm = (modrm & 7) | REX_B(s);
6047
6048 if (mod == 3) {
6049 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6050 /* sign extend */
6051 if (d_ot == OT_QUAD)
6052 gen_op_movslq_T0_T0();
6053 gen_op_mov_reg_T0(d_ot, reg);
6054 } else {
6055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6056 if (d_ot == OT_QUAD) {
6057 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6058 } else {
6059 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6060 }
6061 gen_op_mov_reg_T0(d_ot, reg);
6062 }
6063 } else
6064 #endif
6065 {
/* arpl: protected mode only; adjusts the RPL of the destination
   selector and sets ZF via the arpl helper. */
6066 if (!s->pe || s->vm86)
6067 goto illegal_op;
6068 ot = dflag ? OT_LONG : OT_WORD;
6069 modrm = ldub_code(s->pc++);
6070 reg = (modrm >> 3) & 7;
6071 mod = (modrm >> 6) & 3;
6072 rm = modrm & 7;
6073 if (mod != 3) {
6074 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6075 gen_op_ld_T0_A0(ot + s->mem_index);
6076 } else {
6077 gen_op_mov_TN_reg(ot, 0, rm);
6078 }
6079 if (s->cc_op != CC_OP_DYNAMIC)
6080 gen_op_set_cc_op(s->cc_op);
6081 gen_op_arpl();
6082 s->cc_op = CC_OP_EFLAGS;
6083 if (mod != 3) {
6084 gen_op_st_T0_A0(ot + s->mem_index);
6085 } else {
6086 gen_op_mov_reg_T0(ot, rm);
6087 }
6088 gen_op_arpl_update();
6089 }
6090 break;
/* lar/lsl: protected mode only; load access rights / segment limit
   for the selector in T0, result via T1, ZF set by the helper. */
6091 case 0x102: /* lar */
6092 case 0x103: /* lsl */
6093 if (!s->pe || s->vm86)
6094 goto illegal_op;
6095 ot = dflag ? OT_LONG : OT_WORD;
6096 modrm = ldub_code(s->pc++);
6097 reg = ((modrm >> 3) & 7) | rex_r;
6098 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6099 gen_op_mov_TN_reg(ot, 1, reg);
6100 if (s->cc_op != CC_OP_DYNAMIC)
6101 gen_op_set_cc_op(s->cc_op);
6102 if (b == 0x102)
6103 gen_op_lar();
6104 else
6105 gen_op_lsl();
6106 s->cc_op = CC_OP_EFLAGS;
6107 gen_op_mov_reg_T1(ot, reg);
6108 break;
/* 0f 18: prefetch hints (reg 0-3, memory form only — the address
   is decoded but no load is emitted) and multi-byte nop otherwise. */
6109 case 0x118:
6110 modrm = ldub_code(s->pc++);
6111 mod = (modrm >> 6) & 3;
6112 op = (modrm >> 3) & 7;
6113 switch(op) {
6114 case 0: /* prefetchnta */
6115 case 1: /* prefetchnt0 */
6116 case 2: /* prefetchnt0 */
6117 case 3: /* prefetchnt0 */
6118 if (mod == 3)
6119 goto illegal_op;
6120 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6121 /* nothing more to do */
6122 break;
6123 default: /* nop (multi byte) */
6124 gen_nop_modrm(s, modrm);
6125 break;
6126 }
6127 break;
6128 case 0x119 ... 0x11f: /* nop (multi byte) */
6129 modrm = ldub_code(s->pc++);
6130 gen_nop_modrm(s, modrm);
6131 break;
6132 case 0x120: /* mov reg, crN */
6133 case 0x122: /* mov crN, reg */
6134 if (s->cpl != 0) {
6135 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6136 } else {
6137 modrm = ldub_code(s->pc++);
6138 if ((modrm & 0xc0) != 0xc0)
6139 goto illegal_op;
6140 rm = (modrm & 7) | REX_B(s);
6141 reg = ((modrm >> 3) & 7) | rex_r;
6142 if (CODE64(s))
6143 ot = OT_QUAD;
6144 else
6145 ot = OT_LONG;
6146 switch(reg) {
6147 case 0:
6148 case 2:
6149 case 3:
6150 case 4:
6151 case 8:
6152 if (b & 2) {
6153 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6154 gen_op_mov_TN_reg(ot, 0, rm);
6155 gen_op_movl_crN_T0(reg);
6156 gen_jmp_im(s->pc - s->cs_base);
6157 gen_eob(s);
6158 } else {
6159 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6160 #if !defined(CONFIG_USER_ONLY)
6161 if (reg == 8)
6162 gen_op_movtl_T0_cr8();
6163 else
6164 #endif
6165 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6166 gen_op_mov_reg_T0(ot, rm);
6167 }
6168 break;
6169 default:
6170 goto illegal_op;
6171 }
6172 }
6173 break;
6174 case 0x121: /* mov reg, drN */
6175 case 0x123: /* mov drN, reg */
6176 if (s->cpl != 0) {
6177 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6178 } else {
6179 modrm = ldub_code(s->pc++);
6180 if ((modrm & 0xc0) != 0xc0)
6181 goto illegal_op;
6182 rm = (modrm & 7) | REX_B(s);
6183 reg = ((modrm >> 3) & 7) | rex_r;
6184 if (CODE64(s))
6185 ot = OT_QUAD;
6186 else
6187 ot = OT_LONG;
6188 /* XXX: do it dynamically with CR4.DE bit */
6189 if (reg == 4 || reg == 5 || reg >= 8)
6190 goto illegal_op;
6191 if (b & 2) {
6192 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6193 gen_op_mov_TN_reg(ot, 0, rm);
6194 gen_op_movl_drN_T0(reg);
6195 gen_jmp_im(s->pc - s->cs_base);
6196 gen_eob(s);
6197 } else {
6198 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6199 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6200 gen_op_mov_reg_T0(ot, rm);
6201 }
6202 }
6203 break;
6204 case 0x106: /* clts */
6205 if (s->cpl != 0) {
6206 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6207 } else {
6208 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6209 gen_op_clts();
6210 /* abort block because static cpu state changed */
6211 gen_jmp_im(s->pc - s->cs_base);
6212 gen_eob(s);
6213 }
6214 break;
6215 /* MMX/SSE/SSE2/PNI support */
6216 case 0x1c3: /* MOVNTI reg, mem */
6217 if (!(s->cpuid_features & CPUID_SSE2))
6218 goto illegal_op;
6219 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6220 modrm = ldub_code(s->pc++);
6221 mod = (modrm >> 6) & 3;
6222 if (mod == 3)
6223 goto illegal_op;
6224 reg = ((modrm >> 3) & 7) | rex_r;
6225 /* generate a generic store */
6226 gen_ldst_modrm(s, modrm, ot, reg, 1);
6227 break;
6228 case 0x1ae:
6229 modrm = ldub_code(s->pc++);
6230 mod = (modrm >> 6) & 3;
6231 op = (modrm >> 3) & 7;
6232 switch(op) {
6233 case 0: /* fxsave */
6234 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6235 (s->flags & HF_EM_MASK))
6236 goto illegal_op;
6237 if (s->flags & HF_TS_MASK) {
6238 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6239 break;
6240 }
6241 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6242 gen_op_fxsave_A0((s->dflag == 2));
6243 break;
6244 case 1: /* fxrstor */
6245 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6246 (s->flags & HF_EM_MASK))
6247 goto illegal_op;
6248 if (s->flags & HF_TS_MASK) {
6249 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6250 break;
6251 }
6252 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6253 gen_op_fxrstor_A0((s->dflag == 2));
6254 break;
6255 case 2: /* ldmxcsr */
6256 case 3: /* stmxcsr */
6257 if (s->flags & HF_TS_MASK) {
6258 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6259 break;
6260 }
6261 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6262 mod == 3)
6263 goto illegal_op;
6264 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6265 if (op == 2) {
6266 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6267 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6268 } else {
6269 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6270 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6271 }
6272 break;
6273 case 5: /* lfence */
6274 case 6: /* mfence */
6275 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6276 goto illegal_op;
6277 break;
6278 case 7: /* sfence / clflush */
6279 if ((modrm & 0xc7) == 0xc0) {
6280 /* sfence */
6281 if (!(s->cpuid_features & CPUID_SSE))
6282 goto illegal_op;
6283 } else {
6284 /* clflush */
6285 if (!(s->cpuid_features & CPUID_CLFLUSH))
6286 goto illegal_op;
6287 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6288 }
6289 break;
6290 default:
6291 goto illegal_op;
6292 }
6293 break;
6294 case 0x10d: /* prefetch */
6295 modrm = ldub_code(s->pc++);
6296 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6297 /* ignore for now */
6298 break;
6299 case 0x1aa: /* rsm */
6300 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6301 break;
6302 if (!(s->flags & HF_SMM_MASK))
6303 goto illegal_op;
6304 if (s->cc_op != CC_OP_DYNAMIC) {
6305 gen_op_set_cc_op(s->cc_op);
6306 s->cc_op = CC_OP_DYNAMIC;
6307 }
6308 gen_jmp_im(s->pc - s->cs_base);
6309 gen_op_rsm();
6310 gen_eob(s);
6311 break;
6312 case 0x110 ... 0x117:
6313 case 0x128 ... 0x12f:
6314 case 0x150 ... 0x177:
6315 case 0x17c ... 0x17f:
6316 case 0x1c2:
6317 case 0x1c4 ... 0x1c6:
6318 case 0x1d0 ... 0x1fe:
6319 gen_sse(s, b, pc_start, rex_r);
6320 break;
6321 default:
6322 goto illegal_op;
6323 }
6324 /* lock generation */
6325 if (s->prefix & PREFIX_LOCK)
6326 gen_op_unlock();
6327 return s->pc;
6328 illegal_op:
6329 if (s->prefix & PREFIX_LOCK)
6330 gen_op_unlock();
6331 /* XXX: ensure that no lock was generated */
6332 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6333 return s->pc;
6334 }
6335
6336 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6337 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6338
6339 /* flags read by an operation */
6340 static uint16_t opc_read_flags[NB_OPS] = {
6341 [INDEX_op_aas] = CC_A,
6342 [INDEX_op_aaa] = CC_A,
6343 [INDEX_op_das] = CC_A | CC_C,
6344 [INDEX_op_daa] = CC_A | CC_C,
6345
6346 /* subtle: due to the incl/decl implementation, C is used */
6347 [INDEX_op_update_inc_cc] = CC_C,
6348
6349 [INDEX_op_into] = CC_O,
6350
6351 [INDEX_op_jb_subb] = CC_C,
6352 [INDEX_op_jb_subw] = CC_C,
6353 [INDEX_op_jb_subl] = CC_C,
6354
6355 [INDEX_op_jz_subb] = CC_Z,
6356 [INDEX_op_jz_subw] = CC_Z,
6357 [INDEX_op_jz_subl] = CC_Z,
6358
6359 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6360 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6361 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6362
6363 [INDEX_op_js_subb] = CC_S,
6364 [INDEX_op_js_subw] = CC_S,
6365 [INDEX_op_js_subl] = CC_S,
6366
6367 [INDEX_op_jl_subb] = CC_O | CC_S,
6368 [INDEX_op_jl_subw] = CC_O | CC_S,
6369 [INDEX_op_jl_subl] = CC_O | CC_S,
6370
6371 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6372 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6373 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6374
6375 [INDEX_op_loopnzw] = CC_Z,
6376 [INDEX_op_loopnzl] = CC_Z,
6377 [INDEX_op_loopzw] = CC_Z,
6378 [INDEX_op_loopzl] = CC_Z,
6379
6380 [INDEX_op_seto_T0_cc] = CC_O,
6381 [INDEX_op_setb_T0_cc] = CC_C,
6382 [INDEX_op_setz_T0_cc] = CC_Z,
6383 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6384 [INDEX_op_sets_T0_cc] = CC_S,
6385 [INDEX_op_setp_T0_cc] = CC_P,
6386 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6387 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6388
6389 [INDEX_op_setb_T0_subb] = CC_C,
6390 [INDEX_op_setb_T0_subw] = CC_C,
6391 [INDEX_op_setb_T0_subl] = CC_C,
6392
6393 [INDEX_op_setz_T0_subb] = CC_Z,
6394 [INDEX_op_setz_T0_subw] = CC_Z,
6395 [INDEX_op_setz_T0_subl] = CC_Z,
6396
6397 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6398 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6399 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6400
6401 [INDEX_op_sets_T0_subb] = CC_S,
6402 [INDEX_op_sets_T0_subw] = CC_S,
6403 [INDEX_op_sets_T0_subl] = CC_S,
6404
6405 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6406 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6407 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6408
6409 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6410 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6411 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6412
6413 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6414 [INDEX_op_cmc] = CC_C,
6415 [INDEX_op_salc] = CC_C,
6416
6417 /* needed for correct flag optimisation before string ops */
6418 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6419 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6420 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6421 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6422
6423 #ifdef TARGET_X86_64
6424 [INDEX_op_jb_subq] = CC_C,
6425 [INDEX_op_jz_subq] = CC_Z,
6426 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6427 [INDEX_op_js_subq] = CC_S,
6428 [INDEX_op_jl_subq] = CC_O | CC_S,
6429 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6430
6431 [INDEX_op_loopnzq] = CC_Z,
6432 [INDEX_op_loopzq] = CC_Z,
6433
6434 [INDEX_op_setb_T0_subq] = CC_C,
6435 [INDEX_op_setz_T0_subq] = CC_Z,
6436 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6437 [INDEX_op_sets_T0_subq] = CC_S,
6438 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6439 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6440
6441 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6442 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6443 #endif
6444
6445 #define DEF_READF(SUFFIX)\
6446 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6447 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6448 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6449 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6450 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6451 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6452 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6453 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6454 \
6455 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6456 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6457 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6458 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6459 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6460 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6461 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6462 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6463
6464 DEF_READF( )
6465 DEF_READF(_raw)
6466 #ifndef CONFIG_USER_ONLY
6467 DEF_READF(_kernel)
6468 DEF_READF(_user)
6469 #endif
6470 };
6471
6472 /* flags written by an operation */
6473 static uint16_t opc_write_flags[NB_OPS] = {
6474 [INDEX_op_update2_cc] = CC_OSZAPC,
6475 [INDEX_op_update1_cc] = CC_OSZAPC,
6476 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6477 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6478 /* subtle: due to the incl/decl implementation, C is used */
6479 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6480 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6481
6482 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6483 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6484 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6485 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6486 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6487 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6488 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6489 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6490 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6491 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6492 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6493
6494 /* sse */
6495 [INDEX_op_ucomiss] = CC_OSZAPC,
6496 [INDEX_op_ucomisd] = CC_OSZAPC,
6497 [INDEX_op_comiss] = CC_OSZAPC,
6498 [INDEX_op_comisd] = CC_OSZAPC,
6499
6500 /* bcd */
6501 [INDEX_op_aam] = CC_OSZAPC,
6502 [INDEX_op_aad] = CC_OSZAPC,
6503 [INDEX_op_aas] = CC_OSZAPC,
6504 [INDEX_op_aaa] = CC_OSZAPC,
6505 [INDEX_op_das] = CC_OSZAPC,
6506 [INDEX_op_daa] = CC_OSZAPC,
6507
6508 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6509 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6510 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6511 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6512 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6513 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6514 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6515 [INDEX_op_clc] = CC_C,
6516 [INDEX_op_stc] = CC_C,
6517 [INDEX_op_cmc] = CC_C,
6518
6519 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6520 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6521 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6522 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6523 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6524 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6525 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6526 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6527 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6528 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6529 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6530 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6531
6532 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6533 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6534 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6535 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6536 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6537 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6538
6539 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6540 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6541 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6542 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6543
6544 [INDEX_op_cmpxchg8b] = CC_Z,
6545 [INDEX_op_lar] = CC_Z,
6546 [INDEX_op_lsl] = CC_Z,
6547 [INDEX_op_verr] = CC_Z,
6548 [INDEX_op_verw] = CC_Z,
6549 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6550 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6551
6552 #define DEF_WRITEF(SUFFIX)\
6553 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6554 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6555 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6556 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6557 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6558 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6559 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6560 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6561 \
6562 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6563 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6564 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6565 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6566 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6567 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6568 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6569 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6570 \
6571 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6572 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6573 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6574 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6575 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6576 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6577 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6578 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6579 \
6580 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6581 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6582 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6583 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6584 \
6585 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6586 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6587 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6588 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6589 \
6590 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6591 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6592 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6593 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6594 \
6595 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6596 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6597 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6598 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6599 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6600 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6601 \
6602 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6603 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6604 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6605 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6606 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6607 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6608 \
6609 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6610 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6611 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6612 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6613
6614
6615 DEF_WRITEF( )
6616 DEF_WRITEF(_raw)
6617 #ifndef CONFIG_USER_ONLY
6618 DEF_WRITEF(_kernel)
6619 DEF_WRITEF(_user)
6620 #endif
6621 };
6622
6623 /* simpler form of an operation if no flags need to be generated */
6624 static uint16_t opc_simpler[NB_OPS] = {
6625 [INDEX_op_update2_cc] = INDEX_op_nop,
6626 [INDEX_op_update1_cc] = INDEX_op_nop,
6627 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6628 #if 0
6629 /* broken: CC_OP logic must be rewritten */
6630 [INDEX_op_update_inc_cc] = INDEX_op_nop,
6631 #endif
6632
6633 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6634 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6635 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6636 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6637
6638 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6639 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6640 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6641 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6642
6643 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6644 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6645 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6646 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6647
6648 #define DEF_SIMPLER(SUFFIX)\
6649 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6650 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6651 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6652 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6653 \
6654 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6655 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6656 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6657 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6658
6659 DEF_SIMPLER( )
6660 DEF_SIMPLER(_raw)
6661 #ifndef CONFIG_USER_ONLY
6662 DEF_SIMPLER(_kernel)
6663 DEF_SIMPLER(_user)
6664 #endif
6665 };
6666
6667 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6668 {
6669 switch(macro_id) {
6670 #ifdef MACRO_TEST
6671 case MACRO_TEST:
6672 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6673 break;
6674 #endif
6675 }
6676 }
6677
6678 void optimize_flags_init(void)
6679 {
6680 int i;
6681 /* put default values in arrays */
6682 for(i = 0; i < NB_OPS; i++) {
6683 if (opc_simpler[i] == 0)
6684 opc_simpler[i] = i;
6685 }
6686
6687 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6688
6689 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6690 #if TARGET_LONG_BITS > HOST_LONG_BITS
6691 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6692 TCG_AREG0, offsetof(CPUState, t0), "T0");
6693 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6694 TCG_AREG0, offsetof(CPUState, t1), "T1");
6695 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6696 TCG_AREG0, offsetof(CPUState, t2), "A0");
6697 #else
6698 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6699 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6700 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6701 #endif
6702 /* the helpers are only registered to print debug info */
6703 TCG_HELPER(helper_divl_EAX_T0);
6704 TCG_HELPER(helper_idivl_EAX_T0);
6705 }
6706
6707 /* CPU flags computation optimization: we move backward thru the
6708 generated code to see which flags are needed. The operation is
6709 modified if suitable */
6710 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6711 {
6712 uint16_t *opc_ptr;
6713 int live_flags, write_flags, op;
6714
6715 opc_ptr = opc_buf + opc_buf_len;
6716 /* live_flags contains the flags needed by the next instructions
6717 in the code. At the end of the block, we consider that all the
6718 flags are live. */
6719 live_flags = CC_OSZAPC;
6720 while (opc_ptr > opc_buf) {
6721 op = *--opc_ptr;
6722 /* if none of the flags written by the instruction is used,
6723 then we can try to find a simpler instruction */
6724 write_flags = opc_write_flags[op];
6725 if ((live_flags & write_flags) == 0) {
6726 *opc_ptr = opc_simpler[op];
6727 }
6728 /* compute the live flags before the instruction */
6729 live_flags &= ~write_flags;
6730 live_flags |= opc_read_flags[op];
6731 }
6732 }
6733
6734 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6735 basic block 'tb'. If search_pc is TRUE, also generate PC
6736 information for each intermediate instruction. */
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static CPU state from the tb flags into the
       disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* mem_index picks the user/kernel memory-op variant; 0 means the
       _raw accessors are used (no softmmu) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only possible when no per-insn control
       is needed (no trap flag, no singlestep, no pending IRQ inhibit,
       and - without softmmu - no softmmu-mode code) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug exception on any breakpoint at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for the first op of this
               instruction; pad skipped op slots with 0 */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    /* tb->size is only meaningful for a fresh translation, not for a
       search_pc replay */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6888
/* Translate basic block 'tb' to intermediate code (normal path, no
   per-instruction PC recording).  Always returns 0. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
6893
/* Re-translate basic block 'tb' with search_pc set: per-op guest PC
   and cc_op information is recorded in the gen_opc_* arrays (see
   gen_intermediate_code_internal).  Always returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
6898