[PATCH] KVM: x86 emulator: fix bit string instructions
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 *
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
10 *
11 * Copyright (C) 2006 Qumranet
12 *
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
15 *
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
18 *
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
20 */
21
22 #ifndef __KERNEL__
23 #include <stdio.h>
24 #include <stdint.h>
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf( _f , ## _a )
27 #else
28 #include "kvm.h"
29 #define DPRINTF(x...) do {} while (0)
30 #endif
31 #include "x86_emulate.h"
32 #include <linux/module.h>
33
34 /*
35 * Opcode effective-address decode tables.
36 * Note that we only emulate instructions that have at least one memory
37 * operand (excluding implicit stack references). We assume that stack
38 * references and instruction fetches will never occur in special memory
39 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
40 * not be handled.
41 */
42
43 /* Operand sizes: 8-bit operands or specified/overridden size. */
44 #define ByteOp (1<<0) /* 8-bit operands. */
45 /* Destination operand type. */
46 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
47 #define DstReg (2<<1) /* Register operand. */
48 #define DstMem (3<<1) /* Memory operand. */
49 #define DstMask (3<<1)
50 /* Source operand type. */
51 #define SrcNone (0<<3) /* No source operand. */
52 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
53 #define SrcReg (1<<3) /* Register operand. */
54 #define SrcMem (2<<3) /* Memory operand. */
55 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
56 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
57 #define SrcImm (5<<3) /* Immediate operand. */
58 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
59 #define SrcMask (7<<3)
60 /* Generic ModRM decode. */
61 #define ModRM (1<<6)
62 /* Destination is only written; never read. */
63 #define Mov (1<<7)
64 #define BitOp (1<<8)
65
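/*
 * Illustrative reading of the table below: opcode 0x01 (add r/m,r) is
 * entered as DstMem | SrcReg | ModRM, i.e. fetch a ModRM byte, take the
 * source from its register field and the destination from the decoded
 * effective address; 0x00 is the same entry with ByteOp set for the
 * 8-bit form.
 */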
66 static u8 opcode_table[256] = {
67 /* 0x00 - 0x07 */
68 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
69 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
70 0, 0, 0, 0,
71 /* 0x08 - 0x0F */
72 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
73 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
74 0, 0, 0, 0,
75 /* 0x10 - 0x17 */
76 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
77 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
78 0, 0, 0, 0,
79 /* 0x18 - 0x1F */
80 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
81 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
82 0, 0, 0, 0,
83 /* 0x20 - 0x27 */
84 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
85 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
86 0, 0, 0, 0,
87 /* 0x28 - 0x2F */
88 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
89 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
90 0, 0, 0, 0,
91 /* 0x30 - 0x37 */
92 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
93 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
94 0, 0, 0, 0,
95 /* 0x38 - 0x3F */
96 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
97 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
98 0, 0, 0, 0,
99 /* 0x40 - 0x4F */
100 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
101 /* 0x50 - 0x5F */
102 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
103 /* 0x60 - 0x6F */
104 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
105 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
106 /* 0x70 - 0x7F */
107 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
108 /* 0x80 - 0x87 */
109 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
110 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
111 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
112 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
113 /* 0x88 - 0x8F */
114 ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
115 ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
116 0, 0, 0, DstMem | SrcNone | ModRM | Mov,
117 /* 0x90 - 0x9F */
118 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
119 /* 0xA0 - 0xA7 */
120 ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
121 ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
122 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
123 ByteOp | ImplicitOps, ImplicitOps,
124 /* 0xA8 - 0xAF */
125 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
126 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
127 ByteOp | ImplicitOps, ImplicitOps,
128 /* 0xB0 - 0xBF */
129 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
130 /* 0xC0 - 0xC7 */
131 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, 0, 0,
132 0, 0, ByteOp | DstMem | SrcImm | ModRM | Mov,
133 DstMem | SrcImm | ModRM | Mov,
134 /* 0xC8 - 0xCF */
135 0, 0, 0, 0, 0, 0, 0, 0,
136 /* 0xD0 - 0xD7 */
137 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
138 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
139 0, 0, 0, 0,
140 /* 0xD8 - 0xDF */
141 0, 0, 0, 0, 0, 0, 0, 0,
142 /* 0xE0 - 0xEF */
143 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
144 /* 0xF0 - 0xF7 */
145 0, 0, 0, 0,
146 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
147 /* 0xF8 - 0xFF */
148 0, 0, 0, 0,
149 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
150 };
151
152 static u16 twobyte_table[256] = {
153 /* 0x00 - 0x0F */
154 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
155 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
156 /* 0x10 - 0x1F */
157 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
158 /* 0x20 - 0x2F */
159 ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
160 0, 0, 0, 0, 0, 0, 0, 0,
161 /* 0x30 - 0x3F */
162 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
163 /* 0x40 - 0x47 */
164 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
165 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
166 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
167 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
168 /* 0x48 - 0x4F */
169 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
170 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
171 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
172 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
173 /* 0x50 - 0x5F */
174 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
175 /* 0x60 - 0x6F */
176 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
177 /* 0x70 - 0x7F */
178 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
179 /* 0x80 - 0x8F */
180 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
181 /* 0x90 - 0x9F */
182 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
183 /* 0xA0 - 0xA7 */
184 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
185 /* 0xA8 - 0xAF */
186 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
187 /* 0xB0 - 0xB7 */
188 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
189 DstMem | SrcReg | ModRM | BitOp,
190 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
191 DstReg | SrcMem16 | ModRM | Mov,
192 /* 0xB8 - 0xBF */
193 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
194 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
195 DstReg | SrcMem16 | ModRM | Mov,
196 /* 0xC0 - 0xCF */
197 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
198 /* 0xD0 - 0xDF */
199 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
200 /* 0xE0 - 0xEF */
201 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
202 /* 0xF0 - 0xFF */
203 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
204 };
205
206 /*
207 * Tell the emulator that, of the Group 7 instructions (sgdt, lidt, etc.), we
208 * are interested only in invlpg and not in any of the rest.
209 *
210 * invlpg is a special instruction in that the data it references may not
211 * be mapped.
212 */
213 void kvm_emulator_want_group7_invlpg(void)
214 {
215 twobyte_table[1] &= ~SrcMem;
216 }
217 EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
218
219 /* Type, address-of, and value of an instruction's operand. */
220 struct operand {
221 enum { OP_REG, OP_MEM, OP_IMM } type;
222 unsigned int bytes;
223 unsigned long val, orig_val, *ptr;
224 };
225
226 /* EFLAGS bit definitions. */
227 #define EFLG_OF (1<<11)
228 #define EFLG_DF (1<<10)
229 #define EFLG_SF (1<<7)
230 #define EFLG_ZF (1<<6)
231 #define EFLG_AF (1<<4)
232 #define EFLG_PF (1<<2)
233 #define EFLG_CF (1<<0)
234
235 /*
236 * Instruction emulation:
237 * Most instructions are emulated directly via a fragment of inline assembly
238 * code. This allows us to save/restore EFLAGS and thus very easily pick up
239 * any modified flags.
240 */
241
242 #if defined(CONFIG_X86_64)
243 #define _LO32 "k" /* force 32-bit operand */
244 #define _STK "%%rsp" /* stack pointer */
245 #elif defined(__i386__)
246 #define _LO32 "" /* 32-bit operands are the default */
247 #define _STK "%%esp" /* stack pointer */
248 #endif
249
250 /*
251 * These EFLAGS bits are restored from saved value during emulation, and
252 * any changes are written back to the saved value after emulation.
253 */
254 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
255
256 /* Before executing instruction: restore necessary bits in EFLAGS. */
257 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
258 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
259 "push %"_sav"; " \
260 "movl %"_msk",%"_LO32 _tmp"; " \
261 "andl %"_LO32 _tmp",("_STK"); " \
262 "pushf; " \
263 "notl %"_LO32 _tmp"; " \
264 "andl %"_LO32 _tmp",("_STK"); " \
265 "pop %"_tmp"; " \
266 "orl %"_LO32 _tmp",("_STK"); " \
267 "popf; " \
268 /* _sav &= ~msk; */ \
269 "movl %"_msk",%"_LO32 _tmp"; " \
270 "notl %"_LO32 _tmp"; " \
271 "andl %"_LO32 _tmp",%"_sav"; "
272
273 /* After executing instruction: write-back necessary bits in EFLAGS. */
274 #define _POST_EFLAGS(_sav, _msk, _tmp) \
275 /* _sav |= EFLAGS & _msk; */ \
276 "pushf; " \
277 "pop %"_tmp"; " \
278 "andl %"_msk",%"_LO32 _tmp"; " \
279 "orl %"_LO32 _tmp",%"_sav"; "
280
281 /* Raw emulation: instruction has two explicit operands. */
282 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
283 do { \
284 unsigned long _tmp; \
285 \
286 switch ((_dst).bytes) { \
287 case 2: \
288 __asm__ __volatile__ ( \
289 _PRE_EFLAGS("0","4","2") \
290 _op"w %"_wx"3,%1; " \
291 _POST_EFLAGS("0","4","2") \
292 : "=m" (_eflags), "=m" ((_dst).val), \
293 "=&r" (_tmp) \
294 : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
295 break; \
296 case 4: \
297 __asm__ __volatile__ ( \
298 _PRE_EFLAGS("0","4","2") \
299 _op"l %"_lx"3,%1; " \
300 _POST_EFLAGS("0","4","2") \
301 : "=m" (_eflags), "=m" ((_dst).val), \
302 "=&r" (_tmp) \
303 : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
304 break; \
305 case 8: \
306 __emulate_2op_8byte(_op, _src, _dst, \
307 _eflags, _qx, _qy); \
308 break; \
309 } \
310 } while (0)
311
312 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
313 do { \
314 unsigned long _tmp; \
315 switch ( (_dst).bytes ) \
316 { \
317 case 1: \
318 __asm__ __volatile__ ( \
319 _PRE_EFLAGS("0","4","2") \
320 _op"b %"_bx"3,%1; " \
321 _POST_EFLAGS("0","4","2") \
322 : "=m" (_eflags), "=m" ((_dst).val), \
323 "=&r" (_tmp) \
324 : _by ((_src).val), "i" (EFLAGS_MASK) ); \
325 break; \
326 default: \
327 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
328 _wx, _wy, _lx, _ly, _qx, _qy); \
329 break; \
330 } \
331 } while (0)
332
333 /* Source operand is byte-sized and may be restricted to just %cl. */
334 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
335 __emulate_2op(_op, _src, _dst, _eflags, \
336 "b", "c", "b", "c", "b", "c", "b", "c")
337
338 /* Source operand is byte, word, long or quad sized. */
339 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
340 __emulate_2op(_op, _src, _dst, _eflags, \
341 "b", "q", "w", "r", _LO32, "r", "", "r")
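/*
 * Usage sketch (illustrative): emulate_2op_SrcV("add", src, dst, _eflags)
 * executes a real "add" of the width given by dst.bytes on the shadow
 * values src.val/dst.val, and folds the arithmetic flags produced by the
 * host instruction back into _eflags, restricted to EFLAGS_MASK.
 */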
342
343 /* Source operand is word, long or quad sized. */
344 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
345 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
346 "w", "r", _LO32, "r", "", "r")
347
348 /* Instruction has only one explicit operand (no source operand). */
349 #define emulate_1op(_op, _dst, _eflags) \
350 do { \
351 unsigned long _tmp; \
352 \
353 switch ( (_dst).bytes ) \
354 { \
355 case 1: \
356 __asm__ __volatile__ ( \
357 _PRE_EFLAGS("0","3","2") \
358 _op"b %1; " \
359 _POST_EFLAGS("0","3","2") \
360 : "=m" (_eflags), "=m" ((_dst).val), \
361 "=&r" (_tmp) \
362 : "i" (EFLAGS_MASK) ); \
363 break; \
364 case 2: \
365 __asm__ __volatile__ ( \
366 _PRE_EFLAGS("0","3","2") \
367 _op"w %1; " \
368 _POST_EFLAGS("0","3","2") \
369 : "=m" (_eflags), "=m" ((_dst).val), \
370 "=&r" (_tmp) \
371 : "i" (EFLAGS_MASK) ); \
372 break; \
373 case 4: \
374 __asm__ __volatile__ ( \
375 _PRE_EFLAGS("0","3","2") \
376 _op"l %1; " \
377 _POST_EFLAGS("0","3","2") \
378 : "=m" (_eflags), "=m" ((_dst).val), \
379 "=&r" (_tmp) \
380 : "i" (EFLAGS_MASK) ); \
381 break; \
382 case 8: \
383 __emulate_1op_8byte(_op, _dst, _eflags); \
384 break; \
385 } \
386 } while (0)
387
388 /* Emulate an instruction with quadword operands (x86/64 only). */
389 #if defined(CONFIG_X86_64)
390 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
391 do { \
392 __asm__ __volatile__ ( \
393 _PRE_EFLAGS("0","4","2") \
394 _op"q %"_qx"3,%1; " \
395 _POST_EFLAGS("0","4","2") \
396 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
397 : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
398 } while (0)
399
400 #define __emulate_1op_8byte(_op, _dst, _eflags) \
401 do { \
402 __asm__ __volatile__ ( \
403 _PRE_EFLAGS("0","3","2") \
404 _op"q %1; " \
405 _POST_EFLAGS("0","3","2") \
406 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
407 : "i" (EFLAGS_MASK) ); \
408 } while (0)
409
410 #elif defined(__i386__)
411 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
412 #define __emulate_1op_8byte(_op, _dst, _eflags)
413 #endif /* __i386__ */
414
415 /* Fetch next part of the instruction being emulated. */
416 #define insn_fetch(_type, _size, _eip) \
417 ({ unsigned long _x; \
418 rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
419 (_size), ctxt); \
420 if ( rc != 0 ) \
421 goto done; \
422 (_eip) += (_size); \
423 (_type)_x; \
424 })
425
426 /* Access/update address held in a register, based on addressing mode. */
427 #define register_address(base, reg) \
428 ((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
429 ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
430
431 #define register_address_increment(reg, inc) \
432 do { \
433 /* signed type ensures sign extension to long */ \
434 int _inc = (inc); \
435 if ( ad_bytes == sizeof(unsigned long) ) \
436 (reg) += _inc; \
437 else \
438 (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
439 (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
440 } while (0)
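/*
 * Example: with ad_bytes == 2 only the low 16 bits take part in the
 * arithmetic, so incrementing a register holding 0xffff by 1 wraps it to
 * 0x0000 while any upper bits are preserved; with ad_bytes equal to
 * sizeof(unsigned long) the full register is updated.  register_address()
 * applies the same truncation when forming an effective address.
 */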
441
442 void *decode_register(u8 modrm_reg, unsigned long *regs,
443 int highbyte_regs)
444 {
445 void *p;
446
447 p = &regs[modrm_reg];
448 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
449 p = (unsigned char *)&regs[modrm_reg & 3] + 1;
450 return p;
451 }
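/*
 * Example: with highbyte_regs set (a byte operation with no REX prefix),
 * modrm_reg values 4..7 select AH/CH/DH/BH, i.e. byte 1 of the
 * RAX/RCX/RDX/RBX slots; otherwise the value simply indexes the register
 * array (0 selects RAX/EAX/AX/AL depending on operand size).
 */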
452
453 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
454 struct x86_emulate_ops *ops,
455 void *ptr,
456 u16 *size, unsigned long *address, int op_bytes)
457 {
458 int rc;
459
460 if (op_bytes == 2)
461 op_bytes = 3;
462 *address = 0;
463 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
464 if (rc)
465 return rc;
466 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
467 return rc;
468 }
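/*
 * Note (illustrative): a descriptor-table operand is a 16-bit limit
 * followed by the table base.  Bumping op_bytes from 2 to 3 above mirrors
 * the legacy behaviour of lgdt/lidt with a 16-bit operand size, where only
 * 24 bits of the base are used.
 */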
469
470 int
471 x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
472 {
473 unsigned d;
474 u8 b, sib, twobyte = 0, rex_prefix = 0;
475 u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
476 unsigned long *override_base = NULL;
477 unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
478 int rc = 0;
479 struct operand src, dst;
480 unsigned long cr2 = ctxt->cr2;
481 int mode = ctxt->mode;
482 unsigned long modrm_ea;
483 int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
484
485 /* Shadow copy of register state. Committed on successful emulation. */
486 unsigned long _regs[NR_VCPU_REGS];
487 unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
488 unsigned long modrm_val = 0;
489
490 memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
491
492 switch (mode) {
493 case X86EMUL_MODE_REAL:
494 case X86EMUL_MODE_PROT16:
495 op_bytes = ad_bytes = 2;
496 break;
497 case X86EMUL_MODE_PROT32:
498 op_bytes = ad_bytes = 4;
499 break;
500 #ifdef CONFIG_X86_64
501 case X86EMUL_MODE_PROT64:
502 op_bytes = 4;
503 ad_bytes = 8;
504 break;
505 #endif
506 default:
507 return -1;
508 }
509
510 /* Legacy prefixes. */
511 for (i = 0; i < 8; i++) {
512 switch (b = insn_fetch(u8, 1, _eip)) {
513 case 0x66: /* operand-size override */
514 op_bytes ^= 6; /* switch between 2/4 bytes */
515 break;
516 case 0x67: /* address-size override */
517 if (mode == X86EMUL_MODE_PROT64)
518 ad_bytes ^= 12; /* switch between 4/8 bytes */
519 else
520 ad_bytes ^= 6; /* switch between 2/4 bytes */
521 break;
522 case 0x2e: /* CS override */
523 override_base = &ctxt->cs_base;
524 break;
525 case 0x3e: /* DS override */
526 override_base = &ctxt->ds_base;
527 break;
528 case 0x26: /* ES override */
529 override_base = &ctxt->es_base;
530 break;
531 case 0x64: /* FS override */
532 override_base = &ctxt->fs_base;
533 break;
534 case 0x65: /* GS override */
535 override_base = &ctxt->gs_base;
536 break;
537 case 0x36: /* SS override */
538 override_base = &ctxt->ss_base;
539 break;
540 case 0xf0: /* LOCK */
541 lock_prefix = 1;
542 break;
543 case 0xf3: /* REP/REPE/REPZ */
544 rep_prefix = 1;
545 break;
546 case 0xf2: /* REPNE/REPNZ */
547 break;
548 default:
549 goto done_prefixes;
550 }
551 }
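/*
 * Note: the XOR toggles above flip between the two legal sizes:
 * 2 ^ 6 == 4 and 4 ^ 6 == 2 for the 0x66 override, and 4 ^ 12 == 8 and
 * 8 ^ 12 == 4 for 0x67 in long mode.
 */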
552
553 done_prefixes:
554
555 /* REX prefix. */
556 if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
557 rex_prefix = b;
558 if (b & 8)
559 op_bytes = 8; /* REX.W */
560 modrm_reg = (b & 4) << 1; /* REX.R */
561 index_reg = (b & 2) << 2; /* REX.X */
562 modrm_rm = base_reg = (b & 1) << 3; /* REX.B */
563 b = insn_fetch(u8, 1, _eip);
564 }
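/*
 * Worked example: a REX prefix of 0x4d sets W, R and B, so op_bytes
 * becomes 8, bit 3 of modrm_reg is set ((b & 4) << 1) and bit 3 of
 * modrm_rm/base_reg is set ((b & 1) << 3), extending the ModRM fields to
 * the r8..r15 registers.
 */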
565
566 /* Opcode byte(s). */
567 d = opcode_table[b];
568 if (d == 0) {
569 /* Two-byte opcode? */
570 if (b == 0x0f) {
571 twobyte = 1;
572 b = insn_fetch(u8, 1, _eip);
573 d = twobyte_table[b];
574 }
575
576 /* Unrecognised? */
577 if (d == 0)
578 goto cannot_emulate;
579 }
580
581 /* ModRM and SIB bytes. */
582 if (d & ModRM) {
583 modrm = insn_fetch(u8, 1, _eip);
584 modrm_mod |= (modrm & 0xc0) >> 6;
585 modrm_reg |= (modrm & 0x38) >> 3;
586 modrm_rm |= (modrm & 0x07);
587 modrm_ea = 0;
588 use_modrm_ea = 1;
589
590 if (modrm_mod == 3) {
591 modrm_val = *(unsigned long *)
592 decode_register(modrm_rm, _regs, d & ByteOp);
593 goto modrm_done;
594 }
595
596 if (ad_bytes == 2) {
597 unsigned bx = _regs[VCPU_REGS_RBX];
598 unsigned bp = _regs[VCPU_REGS_RBP];
599 unsigned si = _regs[VCPU_REGS_RSI];
600 unsigned di = _regs[VCPU_REGS_RDI];
601
602 /* 16-bit ModR/M decode. */
603 switch (modrm_mod) {
604 case 0:
605 if (modrm_rm == 6)
606 modrm_ea += insn_fetch(u16, 2, _eip);
607 break;
608 case 1:
609 modrm_ea += insn_fetch(s8, 1, _eip);
610 break;
611 case 2:
612 modrm_ea += insn_fetch(u16, 2, _eip);
613 break;
614 }
615 switch (modrm_rm) {
616 case 0:
617 modrm_ea += bx + si;
618 break;
619 case 1:
620 modrm_ea += bx + di;
621 break;
622 case 2:
623 modrm_ea += bp + si;
624 break;
625 case 3:
626 modrm_ea += bp + di;
627 break;
628 case 4:
629 modrm_ea += si;
630 break;
631 case 5:
632 modrm_ea += di;
633 break;
634 case 6:
635 if (modrm_mod != 0)
636 modrm_ea += bp;
637 break;
638 case 7:
639 modrm_ea += bx;
640 break;
641 }
642 if (modrm_rm == 2 || modrm_rm == 3 ||
643 (modrm_rm == 6 && modrm_mod != 0))
644 if (!override_base)
645 override_base = &ctxt->ss_base;
646 modrm_ea = (u16)modrm_ea;
647 } else {
648 /* 32/64-bit ModR/M decode. */
649 switch (modrm_rm) {
650 case 4:
651 case 12:
652 sib = insn_fetch(u8, 1, _eip);
653 index_reg |= (sib >> 3) & 7;
654 base_reg |= sib & 7;
655 scale = sib >> 6;
656
657 switch (base_reg) {
658 case 5:
659 if (modrm_mod != 0)
660 modrm_ea += _regs[base_reg];
661 else
662 modrm_ea += insn_fetch(s32, 4, _eip);
663 break;
664 default:
665 modrm_ea += _regs[base_reg];
666 }
667 switch (index_reg) {
668 case 4:
669 break;
670 default:
671 modrm_ea += _regs[index_reg] << scale;
672
673 }
674 break;
675 case 5:
676 if (modrm_mod != 0)
677 modrm_ea += _regs[modrm_rm];
678 else if (mode == X86EMUL_MODE_PROT64)
679 rip_relative = 1;
680 break;
681 default:
682 modrm_ea += _regs[modrm_rm];
683 break;
684 }
685 switch (modrm_mod) {
686 case 0:
687 if (modrm_rm == 5)
688 modrm_ea += insn_fetch(s32, 4, _eip);
689 break;
690 case 1:
691 modrm_ea += insn_fetch(s8, 1, _eip);
692 break;
693 case 2:
694 modrm_ea += insn_fetch(s32, 4, _eip);
695 break;
696 }
697 }
698 if (!override_base)
699 override_base = &ctxt->ds_base;
700 if (mode == X86EMUL_MODE_PROT64 &&
701 override_base != &ctxt->fs_base &&
702 override_base != &ctxt->gs_base)
703 override_base = NULL;
704
705 if (override_base)
706 modrm_ea += *override_base;
707
708 if (rip_relative) {
709 modrm_ea += _eip;
710 switch (d & SrcMask) {
711 case SrcImmByte:
712 modrm_ea += 1;
713 break;
714 case SrcImm:
715 if (d & ByteOp)
716 modrm_ea += 1;
717 else
718 if (op_bytes == 8)
719 modrm_ea += 4;
720 else
721 modrm_ea += op_bytes;
722 }
723 }
724 if (ad_bytes != 8)
725 modrm_ea = (u32)modrm_ea;
726 cr2 = modrm_ea;
727 modrm_done:
728 ;
729 }
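/*
 * Worked example (illustrative): for the byte sequence 0x01 0x04 0x8b
 * ("add %eax,(%rbx,%rcx,4)" in 64-bit mode), modrm 0x04 gives mod 0,
 * reg 0 and rm 4, so a SIB byte follows; sib 0x8b gives scale 2, index 1
 * (RCX) and base 3 (RBX), so modrm_ea = RBX + (RCX << 2), with the DS
 * base added only when the mode requires it.
 */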
730
731 /*
732 * Decode and fetch the source operand: register, memory
733 * or immediate.
734 */
735 switch (d & SrcMask) {
736 case SrcNone:
737 break;
738 case SrcReg:
739 src.type = OP_REG;
740 if (d & ByteOp) {
741 src.ptr = decode_register(modrm_reg, _regs,
742 (rex_prefix == 0));
743 src.val = src.orig_val = *(u8 *) src.ptr;
744 src.bytes = 1;
745 } else {
746 src.ptr = decode_register(modrm_reg, _regs, 0);
747 switch ((src.bytes = op_bytes)) {
748 case 2:
749 src.val = src.orig_val = *(u16 *) src.ptr;
750 break;
751 case 4:
752 src.val = src.orig_val = *(u32 *) src.ptr;
753 break;
754 case 8:
755 src.val = src.orig_val = *(u64 *) src.ptr;
756 break;
757 }
758 }
759 break;
760 case SrcMem16:
761 src.bytes = 2;
762 goto srcmem_common;
763 case SrcMem32:
764 src.bytes = 4;
765 goto srcmem_common;
766 case SrcMem:
767 src.bytes = (d & ByteOp) ? 1 : op_bytes;
768 srcmem_common:
769 src.type = OP_MEM;
770 src.ptr = (unsigned long *)cr2;
771 if ((rc = ops->read_emulated((unsigned long)src.ptr,
772 &src.val, src.bytes, ctxt)) != 0)
773 goto done;
774 src.orig_val = src.val;
775 break;
776 case SrcImm:
777 src.type = OP_IMM;
778 src.ptr = (unsigned long *)_eip;
779 src.bytes = (d & ByteOp) ? 1 : op_bytes;
780 if (src.bytes == 8)
781 src.bytes = 4;
782 /* NB. Immediates are sign-extended as necessary. */
783 switch (src.bytes) {
784 case 1:
785 src.val = insn_fetch(s8, 1, _eip);
786 break;
787 case 2:
788 src.val = insn_fetch(s16, 2, _eip);
789 break;
790 case 4:
791 src.val = insn_fetch(s32, 4, _eip);
792 break;
793 }
794 break;
795 case SrcImmByte:
796 src.type = OP_IMM;
797 src.ptr = (unsigned long *)_eip;
798 src.bytes = 1;
799 src.val = insn_fetch(s8, 1, _eip);
800 break;
801 }
802
803 /* Decode and fetch the destination operand: register or memory. */
804 switch (d & DstMask) {
805 case ImplicitOps:
806 /* Special instructions do their own operand decoding. */
807 goto special_insn;
808 case DstReg:
809 dst.type = OP_REG;
810 if ((d & ByteOp)
811 && !(twobyte && (b == 0xb6 || b == 0xb7))) {
812 dst.ptr = decode_register(modrm_reg, _regs,
813 (rex_prefix == 0));
814 dst.val = *(u8 *) dst.ptr;
815 dst.bytes = 1;
816 } else {
817 dst.ptr = decode_register(modrm_reg, _regs, 0);
818 switch ((dst.bytes = op_bytes)) {
819 case 2:
820 dst.val = *(u16 *)dst.ptr;
821 break;
822 case 4:
823 dst.val = *(u32 *)dst.ptr;
824 break;
825 case 8:
826 dst.val = *(u64 *)dst.ptr;
827 break;
828 }
829 }
830 break;
831 case DstMem:
832 dst.type = OP_MEM;
833 dst.ptr = (unsigned long *)cr2;
834 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
835 if (d & BitOp) {
836 dst.ptr += src.val / BITS_PER_LONG;
837 dst.bytes = sizeof(long);
838 }
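/*
 * For illustration: bit-string ops (bt/bts/btr/btc) may carry a bit offset
 * larger than the operand, so the destination is rebased above to the long
 * word that actually contains the bit; e.g. a bit offset of 65 on a 64-bit
 * host advances dst.ptr by one long and leaves bit 1 to be masked off in
 * the two-byte opcode handlers below.
 */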
839 if (!(d & Mov) && /* optimisation - avoid slow emulated read */
840 ((rc = ops->read_emulated((unsigned long)dst.ptr,
841 &dst.val, dst.bytes, ctxt)) != 0))
842 goto done;
843 break;
844 }
845 dst.orig_val = dst.val;
846
847 if (twobyte)
848 goto twobyte_insn;
849
850 switch (b) {
851 case 0x00 ... 0x05:
852 add: /* add */
853 emulate_2op_SrcV("add", src, dst, _eflags);
854 break;
855 case 0x08 ... 0x0d:
856 or: /* or */
857 emulate_2op_SrcV("or", src, dst, _eflags);
858 break;
859 case 0x10 ... 0x15:
860 adc: /* adc */
861 emulate_2op_SrcV("adc", src, dst, _eflags);
862 break;
863 case 0x18 ... 0x1d:
864 sbb: /* sbb */
865 emulate_2op_SrcV("sbb", src, dst, _eflags);
866 break;
867 case 0x20 ... 0x25:
868 and: /* and */
869 emulate_2op_SrcV("and", src, dst, _eflags);
870 break;
871 case 0x28 ... 0x2d:
872 sub: /* sub */
873 emulate_2op_SrcV("sub", src, dst, _eflags);
874 break;
875 case 0x30 ... 0x35:
876 xor: /* xor */
877 emulate_2op_SrcV("xor", src, dst, _eflags);
878 break;
879 case 0x38 ... 0x3d:
880 cmp: /* cmp */
881 emulate_2op_SrcV("cmp", src, dst, _eflags);
882 break;
883 case 0x63: /* movsxd */
884 if (mode != X86EMUL_MODE_PROT64)
885 goto cannot_emulate;
886 dst.val = (s32) src.val;
887 break;
888 case 0x80 ... 0x83: /* Grp1 */
889 switch (modrm_reg) {
890 case 0:
891 goto add;
892 case 1:
893 goto or;
894 case 2:
895 goto adc;
896 case 3:
897 goto sbb;
898 case 4:
899 goto and;
900 case 5:
901 goto sub;
902 case 6:
903 goto xor;
904 case 7:
905 goto cmp;
906 }
907 break;
908 case 0x84 ... 0x85:
909 test: /* test */
910 emulate_2op_SrcV("test", src, dst, _eflags);
911 break;
912 case 0x86 ... 0x87: /* xchg */
913 /* Write back the register source. */
914 switch (dst.bytes) {
915 case 1:
916 *(u8 *) src.ptr = (u8) dst.val;
917 break;
918 case 2:
919 *(u16 *) src.ptr = (u16) dst.val;
920 break;
921 case 4:
922 *src.ptr = (u32) dst.val;
923 break; /* 64b reg: zero-extend */
924 case 8:
925 *src.ptr = dst.val;
926 break;
927 }
928 /*
929 * Write back the memory destination with implicit LOCK
930 * prefix.
931 */
932 dst.val = src.val;
933 lock_prefix = 1;
934 break;
935 case 0xa0 ... 0xa1: /* mov */
936 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
937 dst.val = src.val;
938 _eip += ad_bytes; /* skip src displacement */
939 break;
940 case 0xa2 ... 0xa3: /* mov */
941 dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
942 _eip += ad_bytes; /* skip dst displacement */
943 break;
944 case 0x88 ... 0x8b: /* mov */
945 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
946 dst.val = src.val;
947 break;
948 case 0x8f: /* pop (sole member of Grp1a) */
949 /* 64-bit mode: POP always pops a 64-bit operand. */
950 if (mode == X86EMUL_MODE_PROT64)
951 dst.bytes = 8;
952 if ((rc = ops->read_std(register_address(ctxt->ss_base,
953 _regs[VCPU_REGS_RSP]),
954 &dst.val, dst.bytes, ctxt)) != 0)
955 goto done;
956 register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
957 break;
958 case 0xc0 ... 0xc1:
959 grp2: /* Grp2 */
960 switch (modrm_reg) {
961 case 0: /* rol */
962 emulate_2op_SrcB("rol", src, dst, _eflags);
963 break;
964 case 1: /* ror */
965 emulate_2op_SrcB("ror", src, dst, _eflags);
966 break;
967 case 2: /* rcl */
968 emulate_2op_SrcB("rcl", src, dst, _eflags);
969 break;
970 case 3: /* rcr */
971 emulate_2op_SrcB("rcr", src, dst, _eflags);
972 break;
973 case 4: /* sal/shl */
974 case 6: /* sal/shl */
975 emulate_2op_SrcB("sal", src, dst, _eflags);
976 break;
977 case 5: /* shr */
978 emulate_2op_SrcB("shr", src, dst, _eflags);
979 break;
980 case 7: /* sar */
981 emulate_2op_SrcB("sar", src, dst, _eflags);
982 break;
983 }
984 break;
985 case 0xd0 ... 0xd1: /* Grp2 */
986 src.val = 1;
987 goto grp2;
988 case 0xd2 ... 0xd3: /* Grp2 */
989 src.val = _regs[VCPU_REGS_RCX];
990 goto grp2;
991 case 0xf6 ... 0xf7: /* Grp3 */
992 switch (modrm_reg) {
993 case 0 ... 1: /* test */
994 /*
995 * Special case in Grp3: test has an immediate
996 * source operand.
997 */
998 src.type = OP_IMM;
999 src.ptr = (unsigned long *)_eip;
1000 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1001 if (src.bytes == 8)
1002 src.bytes = 4;
1003 switch (src.bytes) {
1004 case 1:
1005 src.val = insn_fetch(s8, 1, _eip);
1006 break;
1007 case 2:
1008 src.val = insn_fetch(s16, 2, _eip);
1009 break;
1010 case 4:
1011 src.val = insn_fetch(s32, 4, _eip);
1012 break;
1013 }
1014 goto test;
1015 case 2: /* not */
1016 dst.val = ~dst.val;
1017 break;
1018 case 3: /* neg */
1019 emulate_1op("neg", dst, _eflags);
1020 break;
1021 default:
1022 goto cannot_emulate;
1023 }
1024 break;
1025 case 0xfe ... 0xff: /* Grp4/Grp5 */
1026 switch (modrm_reg) {
1027 case 0: /* inc */
1028 emulate_1op("inc", dst, _eflags);
1029 break;
1030 case 1: /* dec */
1031 emulate_1op("dec", dst, _eflags);
1032 break;
1033 case 6: /* push */
1034 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1035 if (mode == X86EMUL_MODE_PROT64) {
1036 dst.bytes = 8;
1037 if ((rc = ops->read_std((unsigned long)dst.ptr,
1038 &dst.val, 8,
1039 ctxt)) != 0)
1040 goto done;
1041 }
1042 register_address_increment(_regs[VCPU_REGS_RSP],
1043 -dst.bytes);
1044 if ((rc = ops->write_std(
1045 register_address(ctxt->ss_base,
1046 _regs[VCPU_REGS_RSP]),
1047 dst.val, dst.bytes, ctxt)) != 0)
1048 goto done;
1049 dst.val = dst.orig_val; /* skanky: disable writeback */
1050 break;
1051 default:
1052 goto cannot_emulate;
1053 }
1054 break;
1055 }
1056
1057 writeback:
1058 if ((d & Mov) || (dst.orig_val != dst.val)) {
1059 switch (dst.type) {
1060 case OP_REG:
1061 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1062 switch (dst.bytes) {
1063 case 1:
1064 *(u8 *)dst.ptr = (u8)dst.val;
1065 break;
1066 case 2:
1067 *(u16 *)dst.ptr = (u16)dst.val;
1068 break;
1069 case 4:
1070 *dst.ptr = (u32)dst.val;
1071 break; /* 64b: zero-ext */
1072 case 8:
1073 *dst.ptr = dst.val;
1074 break;
1075 }
1076 break;
1077 case OP_MEM:
1078 if (lock_prefix)
1079 rc = ops->cmpxchg_emulated((unsigned long)dst.
1080 ptr, dst.orig_val,
1081 dst.val, dst.bytes,
1082 ctxt);
1083 else
1084 rc = ops->write_emulated((unsigned long)dst.ptr,
1085 dst.val, dst.bytes,
1086 ctxt);
1087 if (rc != 0)
1088 goto done;
1089 default:
1090 break;
1091 }
1092 }
1093
1094 /* Commit shadow register state. */
1095 memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
1096 ctxt->eflags = _eflags;
1097 ctxt->vcpu->rip = _eip;
1098
1099 done:
1100 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1101
1102 special_insn:
1103 if (twobyte)
1104 goto twobyte_special_insn;
1105 if (rep_prefix) {
1106 if (_regs[VCPU_REGS_RCX] == 0) {
1107 ctxt->vcpu->rip = _eip;
1108 goto done;
1109 }
1110 _regs[VCPU_REGS_RCX]--;
1111 _eip = ctxt->vcpu->rip;
1112 }
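/*
 * Note: with a rep prefix the emulator handles one iteration per
 * invocation; RCX has just been decremented and _eip is rewound to the
 * start of the instruction, so the guest re-executes it until RCX reaches
 * zero.
 */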
1113 switch (b) {
1114 case 0xa4 ... 0xa5: /* movs */
1115 dst.type = OP_MEM;
1116 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1117 dst.ptr = (unsigned long *)register_address(ctxt->es_base,
1118 _regs[VCPU_REGS_RDI]);
1119 if ((rc = ops->read_emulated(register_address(
1120 override_base ? *override_base : ctxt->ds_base,
1121 _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
1122 goto done;
1123 register_address_increment(_regs[VCPU_REGS_RSI],
1124 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1125 register_address_increment(_regs[VCPU_REGS_RDI],
1126 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1127 break;
1128 case 0xa6 ... 0xa7: /* cmps */
1129 DPRINTF("Urk! I don't handle CMPS.\n");
1130 goto cannot_emulate;
1131 case 0xaa ... 0xab: /* stos */
1132 dst.type = OP_MEM;
1133 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1134 dst.ptr = (unsigned long *)cr2;
1135 dst.val = _regs[VCPU_REGS_RAX];
1136 register_address_increment(_regs[VCPU_REGS_RDI],
1137 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1138 break;
1139 case 0xac ... 0xad: /* lods */
1140 dst.type = OP_REG;
1141 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1142 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1143 if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
1144 goto done;
1145 register_address_increment(_regs[VCPU_REGS_RSI],
1146 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1147 break;
1148 case 0xae ... 0xaf: /* scas */
1149 DPRINTF("Urk! I don't handle SCAS.\n");
1150 goto cannot_emulate;
1151 }
1152 goto writeback;
1153
1154 twobyte_insn:
1155 switch (b) {
1156 case 0x01: /* lgdt, lidt, lmsw */
1157 switch (modrm_reg) {
1158 u16 size;
1159 unsigned long address;
1160
1161 case 2: /* lgdt */
1162 rc = read_descriptor(ctxt, ops, src.ptr,
1163 &size, &address, op_bytes);
1164 if (rc)
1165 goto done;
1166 realmode_lgdt(ctxt->vcpu, size, address);
1167 break;
1168 case 3: /* lidt */
1169 rc = read_descriptor(ctxt, ops, src.ptr,
1170 &size, &address, op_bytes);
1171 if (rc)
1172 goto done;
1173 realmode_lidt(ctxt->vcpu, size, address);
1174 break;
1175 case 4: /* smsw */
1176 if (modrm_mod != 3)
1177 goto cannot_emulate;
1178 *(u16 *)&_regs[modrm_rm]
1179 = realmode_get_cr(ctxt->vcpu, 0);
1180 break;
1181 case 6: /* lmsw */
1182 if (modrm_mod != 3)
1183 goto cannot_emulate;
1184 realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
1185 break;
1186 case 7: /* invlpg*/
1187 emulate_invlpg(ctxt->vcpu, cr2);
1188 break;
1189 default:
1190 goto cannot_emulate;
1191 }
1192 break;
1193 case 0x21: /* mov from dr to reg */
1194 if (modrm_mod != 3)
1195 goto cannot_emulate;
1196 rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
1197 break;
1198 case 0x23: /* mov from reg to dr */
1199 if (modrm_mod != 3)
1200 goto cannot_emulate;
1201 rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
1202 break;
1203 case 0x40 ... 0x4f: /* cmov */
1204 dst.val = dst.orig_val = src.val;
1205 d &= ~Mov; /* default to no move */
1206 /*
1207 * First, assume we're decoding an even cmov opcode
1208 * (lsb == 0).
1209 */
1210 switch ((b & 15) >> 1) {
1211 case 0: /* cmovo */
1212 d |= (_eflags & EFLG_OF) ? Mov : 0;
1213 break;
1214 case 1: /* cmovb/cmovc/cmovnae */
1215 d |= (_eflags & EFLG_CF) ? Mov : 0;
1216 break;
1217 case 2: /* cmovz/cmove */
1218 d |= (_eflags & EFLG_ZF) ? Mov : 0;
1219 break;
1220 case 3: /* cmovbe/cmovna */
1221 d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
1222 break;
1223 case 4: /* cmovs */
1224 d |= (_eflags & EFLG_SF) ? Mov : 0;
1225 break;
1226 case 5: /* cmovp/cmovpe */
1227 d |= (_eflags & EFLG_PF) ? Mov : 0;
1228 break;
1229 case 7: /* cmovle/cmovng */
1230 d |= (_eflags & EFLG_ZF) ? Mov : 0;
1231 /* fall through */
1232 case 6: /* cmovl/cmovnge */
1233 d |= (!(_eflags & EFLG_SF) !=
1234 !(_eflags & EFLG_OF)) ? Mov : 0;
1235 break;
1236 }
1237 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1238 d ^= (b & 1) ? Mov : 0;
1239 break;
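/*
 * Example: opcode 0x44 (cmove) takes the even-opcode path for case 2 and
 * moves only when ZF is set; its odd sibling 0x45 (cmovne) hits the final
 * XOR above and so moves only when ZF is clear.
 */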
1240 case 0xb0 ... 0xb1: /* cmpxchg */
1241 /*
1242 * Save real source value, then compare EAX against
1243 * destination.
1244 */
1245 src.orig_val = src.val;
1246 src.val = _regs[VCPU_REGS_RAX];
1247 emulate_2op_SrcV("cmp", src, dst, _eflags);
1248 /* Always write back. The question is: where to? */
1249 d |= Mov;
1250 if (_eflags & EFLG_ZF) {
1251 /* Success: write back to memory. */
1252 dst.val = src.orig_val;
1253 } else {
1254 /* Failure: write the value we saw to EAX. */
1255 dst.type = OP_REG;
1256 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1257 }
1258 break;
1259 case 0xa3:
1260 bt: /* bt */
1261 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1262 emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
1263 break;
1264 case 0xb3:
1265 btr: /* btr */
1266 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1267 emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
1268 break;
1269 case 0xab:
1270 bts: /* bts */
1271 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1272 emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
1273 break;
1274 case 0xb6 ... 0xb7: /* movzx */
1275 dst.bytes = op_bytes;
1276 dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
1277 break;
1278 case 0xbb:
1279 btc: /* btc */
1280 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1281 emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
1282 break;
1283 case 0xba: /* Grp8 */
1284 switch (modrm_reg & 3) {
1285 case 0:
1286 goto bt;
1287 case 1:
1288 goto bts;
1289 case 2:
1290 goto btr;
1291 case 3:
1292 goto btc;
1293 }
1294 break;
1295 case 0xbe ... 0xbf: /* movsx */
1296 dst.bytes = op_bytes;
1297 dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
1298 break;
1299 }
1300 goto writeback;
1301
1302 twobyte_special_insn:
1303 /* Disable writeback. */
1304 dst.orig_val = dst.val;
1305 switch (b) {
1306 case 0x0d: /* GrpP (prefetch) */
1307 case 0x18: /* Grp16 (prefetch/nop) */
1308 break;
1309 case 0x06:
1310 emulate_clts(ctxt->vcpu);
1311 break;
1312 case 0x20: /* mov cr, reg */
1313 if (modrm_mod != 3)
1314 goto cannot_emulate;
1315 _regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
1316 break;
1317 case 0x22: /* mov reg, cr */
1318 if (modrm_mod != 3)
1319 goto cannot_emulate;
1320 realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
1321 break;
1322 case 0xc7: /* Grp9 (cmpxchg8b) */
1323 #if defined(__i386__)
1324 {
1325 unsigned long old_lo, old_hi;
1326 if (((rc = ops->read_emulated(cr2 + 0, &old_lo, 4,
1327 ctxt)) != 0)
1328 || ((rc = ops->read_emulated(cr2 + 4, &old_hi, 4,
1329 ctxt)) != 0))
1330 goto done;
1331 if ((old_lo != _regs[VCPU_REGS_RAX])
1332 || (old_hi != _regs[VCPU_REGS_RDX])) {
1333 _regs[VCPU_REGS_RAX] = old_lo;
1334 _regs[VCPU_REGS_RDX] = old_hi;
1335 _eflags &= ~EFLG_ZF;
1336 } else if (ops->cmpxchg8b_emulated == NULL) {
1337 rc = X86EMUL_UNHANDLEABLE;
1338 goto done;
1339 } else {
1340 if ((rc = ops->cmpxchg8b_emulated(cr2, old_lo,
1341 old_hi,
1342 _regs[VCPU_REGS_RBX],
1343 _regs[VCPU_REGS_RCX],
1344 ctxt)) != 0)
1345 goto done;
1346 _eflags |= EFLG_ZF;
1347 }
1348 break;
1349 }
1350 #elif defined(CONFIG_X86_64)
1351 {
1352 unsigned long old, new;
1353 if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
1354 goto done;
1355 if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
1356 ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
1357 _regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1358 _regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1359 _eflags &= ~EFLG_ZF;
1360 } else {
1361 new = (_regs[VCPU_REGS_RCX] << 32) | (u32) _regs[VCPU_REGS_RBX];
1362 if ((rc = ops->cmpxchg_emulated(cr2, old,
1363 new, 8, ctxt)) != 0)
1364 goto done;
1365 _eflags |= EFLG_ZF;
1366 }
1367 break;
1368 }
1369 #endif
1370 }
1371 goto writeback;
1372
1373 cannot_emulate:
1374 DPRINTF("Cannot emulate %02x\n", b);
1375 return -1;
1376 }
1377
1378 #ifdef __XEN__
1379
1380 #include <asm/mm.h>
1381 #include <asm/uaccess.h>
1382
1383 int
1384 x86_emulate_read_std(unsigned long addr,
1385 unsigned long *val,
1386 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1387 {
1388 unsigned int rc;
1389
1390 *val = 0;
1391
1392 if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
1393 propagate_page_fault(addr + bytes - rc, 0); /* read fault */
1394 return X86EMUL_PROPAGATE_FAULT;
1395 }
1396
1397 return X86EMUL_CONTINUE;
1398 }
1399
1400 int
1401 x86_emulate_write_std(unsigned long addr,
1402 unsigned long val,
1403 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1404 {
1405 unsigned int rc;
1406
1407 if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
1408 propagate_page_fault(addr + bytes - rc, PGERR_write_access);
1409 return X86EMUL_PROPAGATE_FAULT;
1410 }
1411
1412 return X86EMUL_CONTINUE;
1413 }
1414
1415 #endif