1 | /****************************************************************************** | |
2 | * x86_emulate.c | |
3 | * | |
4 | * Generic x86 (32-bit and 64-bit) instruction decoder and emulator. | |
5 | * | |
6 | * Copyright (c) 2005 Keir Fraser | |
7 | * | |
8 | * Linux coding style, mod r/m decoder, segment base fixes, real-mode | |
9 | * privileged instructions: | |
10 | * | |
11 | * Copyright (C) 2006 Qumranet | |
12 | * | |
13 | * Avi Kivity <avi@qumranet.com> | |
14 | * Yaniv Kamay <yaniv@qumranet.com> | |
15 | * | |
16 | * This work is licensed under the terms of the GNU GPL, version 2. See | |
17 | * the COPYING file in the top-level directory. | |
18 | * | |
19 | * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4 | |
20 | */ | |
21 | ||
22 | #ifndef __KERNEL__ | |
23 | #include <stdio.h> | |
24 | #include <stdint.h> | |
25 | #include <public/xen.h> | |
26 | #define DPRINTF(_f, _a ...) printf(_f , ## _a) | |
27 | #else | |
28 | #include "kvm.h" | |
29 | #include "x86.h" | |
30 | #define DPRINTF(x...) do {} while (0) | |
31 | #endif | |
32 | #include "x86_emulate.h" | |
33 | #include <linux/module.h> | |
34 | ||
35 | /* | |
36 | * Opcode effective-address decode tables. | |
37 | * Note that we only emulate instructions that have at least one memory | |
38 | * operand (excluding implicit stack references). We assume that stack | |
39 | * references and instruction fetches will never occur in special memory | |
40 | * areas that require emulation. So, for example, 'mov <imm>,<reg>' need | |
41 | * not be handled. | |
42 | */ | |
43 | ||
44 | /* Operand sizes: 8-bit operands or specified/overridden size. */ | |
45 | #define ByteOp (1<<0) /* 8-bit operands. */ | |
46 | /* Destination operand type. */ | |
47 | #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */ | |
48 | #define DstReg (2<<1) /* Register operand. */ | |
49 | #define DstMem (3<<1) /* Memory operand. */ | |
50 | #define DstMask (3<<1) | |
51 | /* Source operand type. */ | |
52 | #define SrcNone (0<<3) /* No source operand. */ | |
53 | #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */ | |
54 | #define SrcReg (1<<3) /* Register operand. */ | |
55 | #define SrcMem (2<<3) /* Memory operand. */ | |
56 | #define SrcMem16 (3<<3) /* Memory operand (16-bit). */ | |
57 | #define SrcMem32 (4<<3) /* Memory operand (32-bit). */ | |
58 | #define SrcImm (5<<3) /* Immediate operand. */ | |
59 | #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */ | |
60 | #define SrcMask (7<<3) | |
61 | /* Generic ModRM decode. */ | |
62 | #define ModRM (1<<6) | |
63 | /* Destination is only written; never read. */ | |
64 | #define Mov (1<<7) | |
65 | #define BitOp (1<<8) | |
66 | #define MemAbs (1<<9) /* Memory operand is absolute displacement */ | |
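/*
 * Illustrative sketch (the example_* helper below is not part of the
 * original emulator): how one of the attribute words in the tables that
 * follow can be unpacked.  Opcode 0x00 ("add r/m8, r8") is encoded as
 * ByteOp | DstMem | SrcReg | ModRM, so masking with DstMask/SrcMask
 * recovers the operand classes.
 */
static inline void example_describe_attr(u16 d)
{
	unsigned int dst = d & DstMask;	/* ImplicitOps, DstReg or DstMem */
	unsigned int src = d & SrcMask;	/* SrcNone ... SrcImmByte */

	DPRINTF("byteop=%d modrm=%d dst=%#x src=%#x\n",
		!!(d & ByteOp), !!(d & ModRM), dst, src);
}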
67 | ||
68 | static u16 opcode_table[256] = { | |
69 | /* 0x00 - 0x07 */ | |
70 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
71 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
72 | 0, 0, 0, 0, | |
73 | /* 0x08 - 0x0F */ | |
74 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
75 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
76 | 0, 0, 0, 0, | |
77 | /* 0x10 - 0x17 */ | |
78 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
79 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
80 | 0, 0, 0, 0, | |
81 | /* 0x18 - 0x1F */ | |
82 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
83 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
84 | 0, 0, 0, 0, | |
85 | /* 0x20 - 0x27 */ | |
86 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
87 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
88 | SrcImmByte, SrcImm, 0, 0, | |
89 | /* 0x28 - 0x2F */ | |
90 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
91 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
92 | 0, 0, 0, 0, | |
93 | /* 0x30 - 0x37 */ | |
94 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
95 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
96 | 0, 0, 0, 0, | |
97 | /* 0x38 - 0x3F */ | |
98 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
99 | ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM, | |
100 | 0, 0, 0, 0, | |
101 | /* 0x40 - 0x47 */ | |
102 | DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, | |
103 | /* 0x48 - 0x4F */ | |
104 | DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, | |
105 | /* 0x50 - 0x57 */ | |
106 | SrcReg, SrcReg, SrcReg, SrcReg, SrcReg, SrcReg, SrcReg, SrcReg, | |
107 | /* 0x58 - 0x5F */ | |
108 | DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, | |
109 | /* 0x60 - 0x67 */ | |
110 | 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ , | |
111 | 0, 0, 0, 0, | |
112 | /* 0x68 - 0x6F */ | |
113 | 0, 0, ImplicitOps|Mov, 0, | |
114 | SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */ | |
115 | SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */ | |
116 | /* 0x70 - 0x77 */ | |
117 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
118 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
119 | /* 0x78 - 0x7F */ | |
120 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
121 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
122 | /* 0x80 - 0x87 */ | |
123 | ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM, | |
124 | ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, | |
125 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
126 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, | |
127 | /* 0x88 - 0x8F */ | |
128 | ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov, | |
129 | ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
130 | 0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov, | |
131 | /* 0x90 - 0x9F */ | |
132 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps, ImplicitOps, 0, 0, | |
133 | /* 0xA0 - 0xA7 */ | |
134 | ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs, | |
135 | ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs, | |
136 | ByteOp | ImplicitOps | Mov, ImplicitOps | Mov, | |
137 | ByteOp | ImplicitOps, ImplicitOps, | |
138 | /* 0xA8 - 0xAF */ | |
139 | 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov, | |
140 | ByteOp | ImplicitOps | Mov, ImplicitOps | Mov, | |
141 | ByteOp | ImplicitOps, ImplicitOps, | |
142 | /* 0xB0 - 0xBF */ | |
143 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
144 | /* 0xC0 - 0xC7 */ | |
145 | ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, | |
146 | 0, ImplicitOps, 0, 0, | |
147 | ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov, | |
148 | /* 0xC8 - 0xCF */ | |
149 | 0, 0, 0, 0, 0, 0, 0, 0, | |
150 | /* 0xD0 - 0xD7 */ | |
151 | ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM, | |
152 | ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM, | |
153 | 0, 0, 0, 0, | |
154 | /* 0xD8 - 0xDF */ | |
155 | 0, 0, 0, 0, 0, 0, 0, 0, | |
156 | /* 0xE0 - 0xE7 */ | |
157 | 0, 0, 0, 0, 0, 0, 0, 0, | |
158 | /* 0xE8 - 0xEF */ | |
159 | ImplicitOps, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps, 0, 0, 0, 0, | |
160 | /* 0xF0 - 0xF7 */ | |
161 | 0, 0, 0, 0, | |
162 | ImplicitOps, ImplicitOps, | |
163 | ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, | |
164 | /* 0xF8 - 0xFF */ | |
165 | ImplicitOps, 0, ImplicitOps, ImplicitOps, | |
166 | 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM | |
167 | }; | |
168 | ||
169 | static u16 twobyte_table[256] = { | |
170 | /* 0x00 - 0x0F */ | |
171 | 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0, | |
172 | ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0, | |
173 | /* 0x10 - 0x1F */ | |
174 | 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, | |
175 | /* 0x20 - 0x2F */ | |
176 | ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0, | |
177 | 0, 0, 0, 0, 0, 0, 0, 0, | |
178 | /* 0x30 - 0x3F */ | |
179 | ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
180 | /* 0x40 - 0x47 */ | |
181 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
182 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
183 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
184 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
185 | /* 0x48 - 0x4F */ | |
186 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
187 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
188 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
189 | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov, | |
190 | /* 0x50 - 0x5F */ | |
191 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
192 | /* 0x60 - 0x6F */ | |
193 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
194 | /* 0x70 - 0x7F */ | |
195 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
196 | /* 0x80 - 0x8F */ | |
197 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
198 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
199 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
200 | ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, | |
201 | /* 0x90 - 0x9F */ | |
202 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
203 | /* 0xA0 - 0xA7 */ | |
204 | 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0, | |
205 | /* 0xA8 - 0xAF */ | |
206 | 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0, | |
207 | /* 0xB0 - 0xB7 */ | |
208 | ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0, | |
209 | DstMem | SrcReg | ModRM | BitOp, | |
210 | 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov, | |
211 | DstReg | SrcMem16 | ModRM | Mov, | |
212 | /* 0xB8 - 0xBF */ | |
213 | 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp, | |
214 | 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov, | |
215 | DstReg | SrcMem16 | ModRM | Mov, | |
216 | /* 0xC0 - 0xCF */ | |
217 | 0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM, | |
218 | 0, 0, 0, 0, 0, 0, 0, 0, | |
219 | /* 0xD0 - 0xDF */ | |
220 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
221 | /* 0xE0 - 0xEF */ | |
222 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, | |
223 | /* 0xF0 - 0xFF */ | |
224 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 | |
225 | }; | |
226 | ||
227 | /* EFLAGS bit definitions. */ | |
228 | #define EFLG_OF (1<<11) | |
229 | #define EFLG_DF (1<<10) | |
230 | #define EFLG_SF (1<<7) | |
231 | #define EFLG_ZF (1<<6) | |
232 | #define EFLG_AF (1<<4) | |
233 | #define EFLG_PF (1<<2) | |
234 | #define EFLG_CF (1<<0) | |
235 | ||
236 | /* | |
237 | * Instruction emulation: | |
238 | * Most instructions are emulated directly via a fragment of inline assembly | |
239 | * code. This allows us to save/restore EFLAGS and thus very easily pick up | |
240 | * any modified flags. | |
241 | */ | |
242 | ||
243 | #if defined(CONFIG_X86_64) | |
244 | #define _LO32 "k" /* force 32-bit operand */ | |
245 | #define _STK "%%rsp" /* stack pointer */ | |
246 | #elif defined(__i386__) | |
247 | #define _LO32 "" /* force 32-bit operand */ | |
248 | #define _STK "%%esp" /* stack pointer */ | |
249 | #endif | |
250 | ||
251 | /* | |
252 | * These EFLAGS bits are restored from saved value during emulation, and | |
253 | * any changes are written back to the saved value after emulation. | |
254 | */ | |
255 | #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF) | |
256 | ||
257 | /* Before executing instruction: restore necessary bits in EFLAGS. */ | |
258 | #define _PRE_EFLAGS(_sav, _msk, _tmp) \ | |
259 | /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \ | |
260 | "push %"_sav"; " \ | |
261 | "movl %"_msk",%"_LO32 _tmp"; " \ | |
262 | "andl %"_LO32 _tmp",("_STK"); " \ | |
263 | "pushf; " \ | |
264 | "notl %"_LO32 _tmp"; " \ | |
265 | "andl %"_LO32 _tmp",("_STK"); " \ | |
266 | "pop %"_tmp"; " \ | |
267 | "orl %"_LO32 _tmp",("_STK"); " \ | |
268 | "popf; " \ | |
269 | /* _sav &= ~msk; */ \ | |
270 | "movl %"_msk",%"_LO32 _tmp"; " \ | |
271 | "notl %"_LO32 _tmp"; " \ | |
272 | "andl %"_LO32 _tmp",%"_sav"; " | |
273 | ||
274 | /* After executing instruction: write-back necessary bits in EFLAGS. */ | |
275 | #define _POST_EFLAGS(_sav, _msk, _tmp) \ | |
276 | /* _sav |= EFLAGS & _msk; */ \ | |
277 | "pushf; " \ | |
278 | "pop %"_tmp"; " \ | |
279 | "andl %"_msk",%"_LO32 _tmp"; " \ | |
280 | "orl %"_LO32 _tmp",%"_sav"; " | |
281 | ||
282 | /* Raw emulation: instruction has two explicit operands. */ | |
283 | #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \ | |
284 | do { \ | |
285 | unsigned long _tmp; \ | |
286 | \ | |
287 | switch ((_dst).bytes) { \ | |
288 | case 2: \ | |
289 | __asm__ __volatile__ ( \ | |
290 | _PRE_EFLAGS("0", "4", "2") \ | |
291 | _op"w %"_wx"3,%1; " \ | |
292 | _POST_EFLAGS("0", "4", "2") \ | |
293 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
294 | "=&r" (_tmp) \ | |
295 | : _wy ((_src).val), "i" (EFLAGS_MASK)); \ | |
296 | break; \ | |
297 | case 4: \ | |
298 | __asm__ __volatile__ ( \ | |
299 | _PRE_EFLAGS("0", "4", "2") \ | |
300 | _op"l %"_lx"3,%1; " \ | |
301 | _POST_EFLAGS("0", "4", "2") \ | |
302 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
303 | "=&r" (_tmp) \ | |
304 | : _ly ((_src).val), "i" (EFLAGS_MASK)); \ | |
305 | break; \ | |
306 | case 8: \ | |
307 | __emulate_2op_8byte(_op, _src, _dst, \ | |
308 | _eflags, _qx, _qy); \ | |
309 | break; \ | |
310 | } \ | |
311 | } while (0) | |
312 | ||
313 | #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \ | |
314 | do { \ | |
315 | unsigned long _tmp; \ | |
316 | switch ((_dst).bytes) { \ | |
317 | case 1: \ | |
318 | __asm__ __volatile__ ( \ | |
319 | _PRE_EFLAGS("0", "4", "2") \ | |
320 | _op"b %"_bx"3,%1; " \ | |
321 | _POST_EFLAGS("0", "4", "2") \ | |
322 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
323 | "=&r" (_tmp) \ | |
324 | : _by ((_src).val), "i" (EFLAGS_MASK)); \ | |
325 | break; \ | |
326 | default: \ | |
327 | __emulate_2op_nobyte(_op, _src, _dst, _eflags, \ | |
328 | _wx, _wy, _lx, _ly, _qx, _qy); \ | |
329 | break; \ | |
330 | } \ | |
331 | } while (0) | |
332 | ||
333 | /* Source operand is byte-sized and may be restricted to just %cl. */ | |
334 | #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \ | |
335 | __emulate_2op(_op, _src, _dst, _eflags, \ | |
336 | "b", "c", "b", "c", "b", "c", "b", "c") | |
337 | ||
338 | /* Source operand is byte, word, long or quad sized. */ | |
339 | #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \ | |
340 | __emulate_2op(_op, _src, _dst, _eflags, \ | |
341 | "b", "q", "w", "r", _LO32, "r", "", "r") | |
342 | ||
343 | /* Source operand is word, long or quad sized. */ | |
344 | #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \ | |
345 | __emulate_2op_nobyte(_op, _src, _dst, _eflags, \ | |
346 | "w", "r", _LO32, "r", "", "r") | |
347 | ||
348 | /* Instruction has only one explicit operand (no source operand). */ | |
349 | #define emulate_1op(_op, _dst, _eflags) \ | |
350 | do { \ | |
351 | unsigned long _tmp; \ | |
352 | \ | |
353 | switch ((_dst).bytes) { \ | |
354 | case 1: \ | |
355 | __asm__ __volatile__ ( \ | |
356 | _PRE_EFLAGS("0", "3", "2") \ | |
357 | _op"b %1; " \ | |
358 | _POST_EFLAGS("0", "3", "2") \ | |
359 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
360 | "=&r" (_tmp) \ | |
361 | : "i" (EFLAGS_MASK)); \ | |
362 | break; \ | |
363 | case 2: \ | |
364 | __asm__ __volatile__ ( \ | |
365 | _PRE_EFLAGS("0", "3", "2") \ | |
366 | _op"w %1; " \ | |
367 | _POST_EFLAGS("0", "3", "2") \ | |
368 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
369 | "=&r" (_tmp) \ | |
370 | : "i" (EFLAGS_MASK)); \ | |
371 | break; \ | |
372 | case 4: \ | |
373 | __asm__ __volatile__ ( \ | |
374 | _PRE_EFLAGS("0", "3", "2") \ | |
375 | _op"l %1; " \ | |
376 | _POST_EFLAGS("0", "3", "2") \ | |
377 | : "=m" (_eflags), "=m" ((_dst).val), \ | |
378 | "=&r" (_tmp) \ | |
379 | : "i" (EFLAGS_MASK)); \ | |
380 | break; \ | |
381 | case 8: \ | |
382 | __emulate_1op_8byte(_op, _dst, _eflags); \ | |
383 | break; \ | |
384 | } \ | |
385 | } while (0) | |
386 | ||
387 | /* Emulate an instruction with quadword operands (x86/64 only). */ | |
388 | #if defined(CONFIG_X86_64) | |
389 | #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \ | |
390 | do { \ | |
391 | __asm__ __volatile__ ( \ | |
392 | _PRE_EFLAGS("0", "4", "2") \ | |
393 | _op"q %"_qx"3,%1; " \ | |
394 | _POST_EFLAGS("0", "4", "2") \ | |
395 | : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \ | |
396 | : _qy ((_src).val), "i" (EFLAGS_MASK)); \ | |
397 | } while (0) | |
398 | ||
399 | #define __emulate_1op_8byte(_op, _dst, _eflags) \ | |
400 | do { \ | |
401 | __asm__ __volatile__ ( \ | |
402 | _PRE_EFLAGS("0", "3", "2") \ | |
403 | _op"q %1; " \ | |
404 | _POST_EFLAGS("0", "3", "2") \ | |
405 | : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \ | |
406 | : "i" (EFLAGS_MASK)); \ | |
407 | } while (0) | |
408 | ||
409 | #elif defined(__i386__) | |
410 | #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) | |
411 | #define __emulate_1op_8byte(_op, _dst, _eflags) | |
412 | #endif /* __i386__ */ | |
413 | ||
414 | /* Fetch next part of the instruction being emulated. */ | |
415 | #define insn_fetch(_type, _size, _eip) \ | |
416 | ({ unsigned long _x; \ | |
417 | rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \ | |
418 | if (rc != 0) \ | |
419 | goto done; \ | |
420 | (_eip) += (_size); \ | |
421 | (_type)_x; \ | |
422 | }) | |
423 | ||
424 | /* Access/update address held in a register, based on addressing mode. */ | |
425 | #define address_mask(reg) \ | |
426 | ((c->ad_bytes == sizeof(unsigned long)) ? \ | |
427 | (reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1))) | |
428 | #define register_address(base, reg) \ | |
429 | ((base) + address_mask(reg)) | |
430 | #define register_address_increment(reg, inc) \ | |
431 | do { \ | |
432 | /* signed type ensures sign extension to long */ \ | |
433 | int _inc = (inc); \ | |
434 | if (c->ad_bytes == sizeof(unsigned long)) \ | |
435 | (reg) += _inc; \ | |
436 | else \ | |
437 | (reg) = ((reg) & \ | |
438 | ~((1UL << (c->ad_bytes << 3)) - 1)) | \ | |
439 | (((reg) + _inc) & \ | |
440 | ((1UL << (c->ad_bytes << 3)) - 1)); \ | |
441 | } while (0) | |
442 | ||
443 | #define JMP_REL(rel) \ | |
444 | do { \ | |
445 | register_address_increment(c->eip, rel); \ | |
446 | } while (0) | |
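/*
 * Sketch (illustrative only) of what register_address_increment() does
 * for a 16-bit address size (c->ad_bytes == 2): the increment wraps
 * within the low 16 bits while the upper bits of the register are
 * preserved, matching real-mode/16-bit addressing.
 */
static inline unsigned long example_advance16(unsigned long reg, int inc)
{
	return (reg & ~0xffffUL) | ((reg + inc) & 0xffffUL);
}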
447 | ||
448 | static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt, | |
449 | struct x86_emulate_ops *ops, | |
450 | unsigned long linear, u8 *dest) | |
451 | { | |
452 | struct fetch_cache *fc = &ctxt->decode.fetch; | |
453 | int rc; | |
454 | int size; | |
455 | ||
456 | if (linear < fc->start || linear >= fc->end) { | |
457 | size = min(15UL, PAGE_SIZE - offset_in_page(linear)); | |
458 | rc = ops->read_std(linear, fc->data, size, ctxt->vcpu); | |
459 | if (rc) | |
460 | return rc; | |
461 | fc->start = linear; | |
462 | fc->end = linear + size; | |
463 | } | |
464 | *dest = fc->data[linear - fc->start]; | |
465 | return 0; | |
466 | } | |
467 | ||
468 | static int do_insn_fetch(struct x86_emulate_ctxt *ctxt, | |
469 | struct x86_emulate_ops *ops, | |
470 | unsigned long eip, void *dest, unsigned size) | |
471 | { | |
472 | int rc = 0; | |
473 | ||
474 | eip += ctxt->cs_base; | |
475 | while (size--) { | |
476 | rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++); | |
477 | if (rc) | |
478 | return rc; | |
479 | } | |
480 | return 0; | |
481 | } | |
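/*
 * Minimal caller sketch (example_fetch_imm8 is not part of the original
 * file): insn_fetch() expects an 'rc' variable, 'ctxt'/'ops' in scope and
 * a 'done' label to bail out to on a failed fetch, exactly as
 * decode_modrm() and decode_abs() below provide.
 */
static inline int example_fetch_imm8(struct x86_emulate_ctxt *ctxt,
				     struct x86_emulate_ops *ops, s8 *imm)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	*imm = insn_fetch(s8, 1, c->eip);
done:
	return rc;
}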
482 | ||
483 | /* | |
484 | * Given the 'reg' portion of a ModRM byte, and a register block, return a | |
485 | * pointer into the block that addresses the relevant register. | |
486 | * @highbyte_regs specifies whether to decode AH,CH,DH,BH. | |
487 | */ | |
488 | static void *decode_register(u8 modrm_reg, unsigned long *regs, | |
489 | int highbyte_regs) | |
490 | { | |
491 | void *p; | |
492 | ||
493 | p = ®s[modrm_reg]; | |
494 | if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8) | |
495 | p = (unsigned char *)®s[modrm_reg & 3] + 1; | |
496 | return p; | |
497 | } | |
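/*
 * Usage sketch (illustrative, assuming the little-endian layout of x86):
 * with highbyte_regs set, register number 4 selects AH, i.e. byte 1 of
 * the RAX slot, as handled by decode_register() above.
 */
static inline u8 example_read_ah(unsigned long *regs)
{
	return *(u8 *)decode_register(4, regs, 1);	/* 4 == AH */
}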
498 | ||
499 | static int read_descriptor(struct x86_emulate_ctxt *ctxt, | |
500 | struct x86_emulate_ops *ops, | |
501 | void *ptr, | |
502 | u16 *size, unsigned long *address, int op_bytes) | |
503 | { | |
504 | int rc; | |
505 | ||
506 | if (op_bytes == 2) | |
507 | op_bytes = 3; | |
508 | *address = 0; | |
509 | rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, | |
510 | ctxt->vcpu); | |
511 | if (rc) | |
512 | return rc; | |
513 | rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, | |
514 | ctxt->vcpu); | |
515 | return rc; | |
516 | } | |
517 | ||
518 | static int test_cc(unsigned int condition, unsigned int flags) | |
519 | { | |
520 | int rc = 0; | |
521 | ||
522 | switch ((condition & 15) >> 1) { | |
523 | case 0: /* o */ | |
524 | rc |= (flags & EFLG_OF); | |
525 | break; | |
526 | case 1: /* b/c/nae */ | |
527 | rc |= (flags & EFLG_CF); | |
528 | break; | |
529 | case 2: /* z/e */ | |
530 | rc |= (flags & EFLG_ZF); | |
531 | break; | |
532 | case 3: /* be/na */ | |
533 | rc |= (flags & (EFLG_CF|EFLG_ZF)); | |
534 | break; | |
535 | case 4: /* s */ | |
536 | rc |= (flags & EFLG_SF); | |
537 | break; | |
538 | case 5: /* p/pe */ | |
539 | rc |= (flags & EFLG_PF); | |
540 | break; | |
541 | case 7: /* le/ng */ | |
542 | rc |= (flags & EFLG_ZF); | |
543 | /* fall through */ | |
544 | case 6: /* l/nge */ | |
545 | rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF)); | |
546 | break; | |
547 | } | |
548 | ||
549 | /* Odd condition identifiers (lsb == 1) have inverted sense. */ | |
550 | return (!!rc ^ (condition & 1)); | |
551 | } | |
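/*
 * Usage sketch for test_cc() (not part of the original code): the low
 * nibble of a Jcc/SETcc/CMOVcc opcode selects the condition.  0x74 is
 * JZ/JE, so (0x74 & 15) >> 1 == 2 ("z/e") and the result is non-zero
 * exactly when ZF is set; the low bit (e.g. 0x75, JNZ) inverts the sense.
 */
static inline int example_jz_taken(unsigned int eflags)
{
	return test_cc(0x74, eflags);
}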
552 | ||
553 | static void decode_register_operand(struct operand *op, | |
554 | struct decode_cache *c, | |
555 | int inhibit_bytereg) | |
556 | { | |
557 | unsigned reg = c->modrm_reg; | |
558 | int highbyte_regs = c->rex_prefix == 0; | |
559 | ||
560 | if (!(c->d & ModRM)) | |
561 | reg = (c->b & 7) | ((c->rex_prefix & 1) << 3); | |
562 | op->type = OP_REG; | |
563 | if ((c->d & ByteOp) && !inhibit_bytereg) { | |
564 | op->ptr = decode_register(reg, c->regs, highbyte_regs); | |
565 | op->val = *(u8 *)op->ptr; | |
566 | op->bytes = 1; | |
567 | } else { | |
568 | op->ptr = decode_register(reg, c->regs, 0); | |
569 | op->bytes = c->op_bytes; | |
570 | switch (op->bytes) { | |
571 | case 2: | |
572 | op->val = *(u16 *)op->ptr; | |
573 | break; | |
574 | case 4: | |
575 | op->val = *(u32 *)op->ptr; | |
576 | break; | |
577 | case 8: | |
578 | op->val = *(u64 *) op->ptr; | |
579 | break; | |
580 | } | |
581 | } | |
582 | op->orig_val = op->val; | |
583 | } | |
584 | ||
585 | static int decode_modrm(struct x86_emulate_ctxt *ctxt, | |
586 | struct x86_emulate_ops *ops) | |
587 | { | |
588 | struct decode_cache *c = &ctxt->decode; | |
589 | u8 sib; | |
590 | int index_reg = 0, base_reg = 0, scale, rip_relative = 0; | |
591 | int rc = 0; | |
592 | ||
593 | if (c->rex_prefix) { | |
594 | c->modrm_reg = (c->rex_prefix & 4) << 1; /* REX.R */ | |
595 | index_reg = (c->rex_prefix & 2) << 2; /* REX.X */ | |
596 | c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */ | |
597 | } | |
598 | ||
599 | c->modrm = insn_fetch(u8, 1, c->eip); | |
600 | c->modrm_mod |= (c->modrm & 0xc0) >> 6; | |
601 | c->modrm_reg |= (c->modrm & 0x38) >> 3; | |
602 | c->modrm_rm |= (c->modrm & 0x07); | |
603 | c->modrm_ea = 0; | |
604 | c->use_modrm_ea = 1; | |
605 | ||
606 | if (c->modrm_mod == 3) { | |
607 | c->modrm_val = *(unsigned long *) | |
608 | decode_register(c->modrm_rm, c->regs, c->d & ByteOp); | |
609 | return rc; | |
610 | } | |
611 | ||
612 | if (c->ad_bytes == 2) { | |
613 | unsigned bx = c->regs[VCPU_REGS_RBX]; | |
614 | unsigned bp = c->regs[VCPU_REGS_RBP]; | |
615 | unsigned si = c->regs[VCPU_REGS_RSI]; | |
616 | unsigned di = c->regs[VCPU_REGS_RDI]; | |
617 | ||
618 | /* 16-bit ModR/M decode. */ | |
619 | switch (c->modrm_mod) { | |
620 | case 0: | |
621 | if (c->modrm_rm == 6) | |
622 | c->modrm_ea += insn_fetch(u16, 2, c->eip); | |
623 | break; | |
624 | case 1: | |
625 | c->modrm_ea += insn_fetch(s8, 1, c->eip); | |
626 | break; | |
627 | case 2: | |
628 | c->modrm_ea += insn_fetch(u16, 2, c->eip); | |
629 | break; | |
630 | } | |
631 | switch (c->modrm_rm) { | |
632 | case 0: | |
633 | c->modrm_ea += bx + si; | |
634 | break; | |
635 | case 1: | |
636 | c->modrm_ea += bx + di; | |
637 | break; | |
638 | case 2: | |
639 | c->modrm_ea += bp + si; | |
640 | break; | |
641 | case 3: | |
642 | c->modrm_ea += bp + di; | |
643 | break; | |
644 | case 4: | |
645 | c->modrm_ea += si; | |
646 | break; | |
647 | case 5: | |
648 | c->modrm_ea += di; | |
649 | break; | |
650 | case 6: | |
651 | if (c->modrm_mod != 0) | |
652 | c->modrm_ea += bp; | |
653 | break; | |
654 | case 7: | |
655 | c->modrm_ea += bx; | |
656 | break; | |
657 | } | |
658 | if (c->modrm_rm == 2 || c->modrm_rm == 3 || | |
659 | (c->modrm_rm == 6 && c->modrm_mod != 0)) | |
660 | if (!c->override_base) | |
661 | c->override_base = &ctxt->ss_base; | |
662 | c->modrm_ea = (u16)c->modrm_ea; | |
663 | } else { | |
664 | /* 32/64-bit ModR/M decode. */ | |
665 | switch (c->modrm_rm) { | |
666 | case 4: | |
667 | case 12: | |
668 | sib = insn_fetch(u8, 1, c->eip); | |
669 | index_reg |= (sib >> 3) & 7; | |
670 | base_reg |= sib & 7; | |
671 | scale = sib >> 6; | |
672 | ||
673 | switch (base_reg) { | |
674 | case 5: | |
675 | if (c->modrm_mod != 0) | |
676 | c->modrm_ea += c->regs[base_reg]; | |
677 | else | |
678 | c->modrm_ea += | |
679 | insn_fetch(s32, 4, c->eip); | |
680 | break; | |
681 | default: | |
682 | c->modrm_ea += c->regs[base_reg]; | |
683 | } | |
684 | switch (index_reg) { | |
685 | case 4: | |
686 | break; | |
687 | default: | |
688 | c->modrm_ea += c->regs[index_reg] << scale; | |
689 | } | |
690 | break; | |
691 | case 5: | |
692 | if (c->modrm_mod != 0) | |
693 | c->modrm_ea += c->regs[c->modrm_rm]; | |
694 | else if (ctxt->mode == X86EMUL_MODE_PROT64) | |
695 | rip_relative = 1; | |
696 | break; | |
697 | default: | |
698 | c->modrm_ea += c->regs[c->modrm_rm]; | |
699 | break; | |
700 | } | |
701 | switch (c->modrm_mod) { | |
702 | case 0: | |
703 | if (c->modrm_rm == 5) | |
704 | c->modrm_ea += insn_fetch(s32, 4, c->eip); | |
705 | break; | |
706 | case 1: | |
707 | c->modrm_ea += insn_fetch(s8, 1, c->eip); | |
708 | break; | |
709 | case 2: | |
710 | c->modrm_ea += insn_fetch(s32, 4, c->eip); | |
711 | break; | |
712 | } | |
713 | } | |
714 | if (rip_relative) { | |
715 | c->modrm_ea += c->eip; | |
716 | switch (c->d & SrcMask) { | |
717 | case SrcImmByte: | |
718 | c->modrm_ea += 1; | |
719 | break; | |
720 | case SrcImm: | |
721 | if (c->d & ByteOp) | |
722 | c->modrm_ea += 1; | |
723 | else | |
724 | if (c->op_bytes == 8) | |
725 | c->modrm_ea += 4; | |
726 | else | |
727 | c->modrm_ea += c->op_bytes; | |
728 | } | |
729 | } | |
730 | done: | |
731 | return rc; | |
732 | } | |
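/*
 * Minimal sketch (illustrative helper, not in the original file) of the
 * field split performed at the top of decode_modrm(): e.g. ModRM byte
 * 0x46 gives mod=1, reg=0, rm=6, i.e. a [bp+disp8] operand in 16-bit
 * addressing.
 */
static inline void example_split_modrm(u8 modrm, u8 *mod, u8 *reg, u8 *rm)
{
	*mod = (modrm & 0xc0) >> 6;
	*reg = (modrm & 0x38) >> 3;
	*rm = modrm & 0x07;
}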
733 | ||
734 | static int decode_abs(struct x86_emulate_ctxt *ctxt, | |
735 | struct x86_emulate_ops *ops) | |
736 | { | |
737 | struct decode_cache *c = &ctxt->decode; | |
738 | int rc = 0; | |
739 | ||
740 | switch (c->ad_bytes) { | |
741 | case 2: | |
742 | c->modrm_ea = insn_fetch(u16, 2, c->eip); | |
743 | break; | |
744 | case 4: | |
745 | c->modrm_ea = insn_fetch(u32, 4, c->eip); | |
746 | break; | |
747 | case 8: | |
748 | c->modrm_ea = insn_fetch(u64, 8, c->eip); | |
749 | break; | |
750 | } | |
751 | done: | |
752 | return rc; | |
753 | } | |
754 | ||
755 | int | |
756 | x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops) | |
757 | { | |
758 | struct decode_cache *c = &ctxt->decode; | |
759 | int rc = 0; | |
760 | int mode = ctxt->mode; | |
761 | ||
762 | /* Shadow copy of register state. Committed on successful emulation. */ | |
763 | ||
764 | memset(c, 0, sizeof(struct decode_cache)); | |
765 | c->eip = ctxt->vcpu->rip; | |
766 | memcpy(c->regs, ctxt->vcpu->regs, sizeof c->regs); | |
767 | ||
768 | switch (mode) { | |
769 | case X86EMUL_MODE_REAL: | |
770 | case X86EMUL_MODE_PROT16: | |
771 | c->op_bytes = c->ad_bytes = 2; | |
772 | break; | |
773 | case X86EMUL_MODE_PROT32: | |
774 | c->op_bytes = c->ad_bytes = 4; | |
775 | break; | |
776 | #ifdef CONFIG_X86_64 | |
777 | case X86EMUL_MODE_PROT64: | |
778 | c->op_bytes = 4; | |
779 | c->ad_bytes = 8; | |
780 | break; | |
781 | #endif | |
782 | default: | |
783 | return -1; | |
784 | } | |
785 | ||
786 | /* Legacy prefixes. */ | |
787 | for (;;) { | |
788 | switch (c->b = insn_fetch(u8, 1, c->eip)) { | |
789 | case 0x66: /* operand-size override */ | |
790 | c->op_bytes ^= 6; /* switch between 2/4 bytes */ | |
791 | break; | |
792 | case 0x67: /* address-size override */ | |
793 | if (mode == X86EMUL_MODE_PROT64) | |
794 | /* switch between 4/8 bytes */ | |
795 | c->ad_bytes ^= 12; | |
796 | else | |
797 | /* switch between 2/4 bytes */ | |
798 | c->ad_bytes ^= 6; | |
799 | break; | |
800 | case 0x2e: /* CS override */ | |
801 | c->override_base = &ctxt->cs_base; | |
802 | break; | |
803 | case 0x3e: /* DS override */ | |
804 | c->override_base = &ctxt->ds_base; | |
805 | break; | |
806 | case 0x26: /* ES override */ | |
807 | c->override_base = &ctxt->es_base; | |
808 | break; | |
809 | case 0x64: /* FS override */ | |
810 | c->override_base = &ctxt->fs_base; | |
811 | break; | |
812 | case 0x65: /* GS override */ | |
813 | c->override_base = &ctxt->gs_base; | |
814 | break; | |
815 | case 0x36: /* SS override */ | |
816 | c->override_base = &ctxt->ss_base; | |
817 | break; | |
818 | case 0x40 ... 0x4f: /* REX */ | |
819 | if (mode != X86EMUL_MODE_PROT64) | |
820 | goto done_prefixes; | |
821 | c->rex_prefix = c->b; | |
822 | continue; | |
823 | case 0xf0: /* LOCK */ | |
824 | c->lock_prefix = 1; | |
825 | break; | |
826 | case 0xf2: /* REPNE/REPNZ */ | |
827 | case 0xf3: /* REP/REPE/REPZ */ | |
828 | c->rep_prefix = 1; | |
829 | break; | |
830 | default: | |
831 | goto done_prefixes; | |
832 | } | |
833 | ||
834 | /* Any legacy prefix after a REX prefix nullifies its effect. */ | |
835 | ||
836 | c->rex_prefix = 0; | |
837 | } | |
838 | ||
839 | done_prefixes: | |
840 | ||
841 | /* REX prefix. */ | |
842 | if (c->rex_prefix) | |
843 | if (c->rex_prefix & 8) | |
844 | c->op_bytes = 8; /* REX.W */ | |
845 | ||
846 | /* Opcode byte(s). */ | |
847 | c->d = opcode_table[c->b]; | |
848 | if (c->d == 0) { | |
849 | /* Two-byte opcode? */ | |
850 | if (c->b == 0x0f) { | |
851 | c->twobyte = 1; | |
852 | c->b = insn_fetch(u8, 1, c->eip); | |
853 | c->d = twobyte_table[c->b]; | |
854 | } | |
855 | ||
856 | /* Unrecognised? */ | |
857 | if (c->d == 0) { | |
858 | DPRINTF("Cannot emulate %02x\n", c->b); | |
859 | return -1; | |
860 | } | |
861 | } | |
862 | ||
863 | /* ModRM and SIB bytes. */ | |
864 | if (c->d & ModRM) | |
865 | rc = decode_modrm(ctxt, ops); | |
866 | else if (c->d & MemAbs) | |
867 | rc = decode_abs(ctxt, ops); | |
868 | if (rc) | |
869 | goto done; | |
870 | ||
871 | if (!c->override_base) | |
872 | c->override_base = &ctxt->ds_base; | |
873 | if (mode == X86EMUL_MODE_PROT64 && | |
874 | c->override_base != &ctxt->fs_base && | |
875 | c->override_base != &ctxt->gs_base) | |
876 | c->override_base = NULL; | |
877 | ||
878 | if (c->override_base) | |
879 | c->modrm_ea += *c->override_base; | |
880 | ||
881 | if (c->ad_bytes != 8) | |
882 | c->modrm_ea = (u32)c->modrm_ea; | |
883 | /* | |
884 | * Decode and fetch the source operand: register, memory | |
885 | * or immediate. | |
886 | */ | |
887 | switch (c->d & SrcMask) { | |
888 | case SrcNone: | |
889 | break; | |
890 | case SrcReg: | |
891 | decode_register_operand(&c->src, c, 0); | |
892 | break; | |
893 | case SrcMem16: | |
894 | c->src.bytes = 2; | |
895 | goto srcmem_common; | |
896 | case SrcMem32: | |
897 | c->src.bytes = 4; | |
898 | goto srcmem_common; | |
899 | case SrcMem: | |
900 | c->src.bytes = (c->d & ByteOp) ? 1 : | |
901 | c->op_bytes; | |
902 | /* Don't fetch the address for invlpg: it could be unmapped. */ | |
903 | if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7) | |
904 | break; | |
905 | srcmem_common: | |
906 | /* | |
907 | * For instructions with a ModR/M byte, switch to register | |
908 | * access if Mod = 3. | |
909 | */ | |
910 | if ((c->d & ModRM) && c->modrm_mod == 3) { | |
911 | c->src.type = OP_REG; | |
912 | break; | |
913 | } | |
914 | c->src.type = OP_MEM; | |
915 | break; | |
916 | case SrcImm: | |
917 | c->src.type = OP_IMM; | |
918 | c->src.ptr = (unsigned long *)c->eip; | |
919 | c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
920 | if (c->src.bytes == 8) | |
921 | c->src.bytes = 4; | |
922 | /* NB. Immediates are sign-extended as necessary. */ | |
923 | switch (c->src.bytes) { | |
924 | case 1: | |
925 | c->src.val = insn_fetch(s8, 1, c->eip); | |
926 | break; | |
927 | case 2: | |
928 | c->src.val = insn_fetch(s16, 2, c->eip); | |
929 | break; | |
930 | case 4: | |
931 | c->src.val = insn_fetch(s32, 4, c->eip); | |
932 | break; | |
933 | } | |
934 | break; | |
935 | case SrcImmByte: | |
936 | c->src.type = OP_IMM; | |
937 | c->src.ptr = (unsigned long *)c->eip; | |
938 | c->src.bytes = 1; | |
939 | c->src.val = insn_fetch(s8, 1, c->eip); | |
940 | break; | |
941 | } | |
942 | ||
943 | /* Decode and fetch the destination operand: register or memory. */ | |
944 | switch (c->d & DstMask) { | |
945 | case ImplicitOps: | |
946 | /* Special instructions do their own operand decoding. */ | |
947 | return 0; | |
948 | case DstReg: | |
949 | decode_register_operand(&c->dst, c, | |
950 | c->twobyte && (c->b == 0xb6 || c->b == 0xb7)); | |
951 | break; | |
952 | case DstMem: | |
953 | if ((c->d & ModRM) && c->modrm_mod == 3) { | |
954 | c->dst.type = OP_REG; | |
955 | break; | |
956 | } | |
957 | c->dst.type = OP_MEM; | |
958 | break; | |
959 | } | |
960 | ||
961 | done: | |
962 | return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0; | |
963 | } | |
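/*
 * Calling-sequence sketch (example_emulate_one is illustrative and
 * assumes both entry points are declared in x86_emulate.h): decode
 * first, then emulate; register state is only committed back to the
 * vcpu if both phases succeed.
 */
static inline int example_emulate_one(struct x86_emulate_ctxt *ctxt,
				      struct x86_emulate_ops *ops)
{
	int r = x86_decode_insn(ctxt, ops);

	if (r == 0)
		r = x86_emulate_insn(ctxt, ops);
	return r;
}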
964 | ||
965 | static inline void emulate_push(struct x86_emulate_ctxt *ctxt) | |
966 | { | |
967 | struct decode_cache *c = &ctxt->decode; | |
968 | ||
969 | c->dst.type = OP_MEM; | |
970 | c->dst.bytes = c->op_bytes; | |
971 | c->dst.val = c->src.val; | |
972 | register_address_increment(c->regs[VCPU_REGS_RSP], -c->op_bytes); | |
973 | c->dst.ptr = (void *) register_address(ctxt->ss_base, | |
974 | c->regs[VCPU_REGS_RSP]); | |
975 | } | |
976 | ||
977 | static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt, | |
978 | struct x86_emulate_ops *ops) | |
979 | { | |
980 | struct decode_cache *c = &ctxt->decode; | |
981 | int rc; | |
982 | ||
983 | /* 64-bit mode: POP always pops a 64-bit operand. */ | |
984 | ||
985 | if (ctxt->mode == X86EMUL_MODE_PROT64) | |
986 | c->dst.bytes = 8; | |
987 | ||
988 | rc = ops->read_std(register_address(ctxt->ss_base, | |
989 | c->regs[VCPU_REGS_RSP]), | |
990 | &c->dst.val, c->dst.bytes, ctxt->vcpu); | |
991 | if (rc != 0) | |
992 | return rc; | |
993 | ||
994 | register_address_increment(c->regs[VCPU_REGS_RSP], c->dst.bytes); | |
995 | ||
996 | return 0; | |
997 | } | |
998 | ||
999 | static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt) | |
1000 | { | |
1001 | struct decode_cache *c = &ctxt->decode; | |
1002 | switch (c->modrm_reg) { | |
1003 | case 0: /* rol */ | |
1004 | emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags); | |
1005 | break; | |
1006 | case 1: /* ror */ | |
1007 | emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags); | |
1008 | break; | |
1009 | case 2: /* rcl */ | |
1010 | emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags); | |
1011 | break; | |
1012 | case 3: /* rcr */ | |
1013 | emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags); | |
1014 | break; | |
1015 | case 4: /* sal/shl */ | |
1016 | case 6: /* sal/shl */ | |
1017 | emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags); | |
1018 | break; | |
1019 | case 5: /* shr */ | |
1020 | emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags); | |
1021 | break; | |
1022 | case 7: /* sar */ | |
1023 | emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags); | |
1024 | break; | |
1025 | } | |
1026 | } | |
1027 | ||
1028 | static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt, | |
1029 | struct x86_emulate_ops *ops) | |
1030 | { | |
1031 | struct decode_cache *c = &ctxt->decode; | |
1032 | int rc = 0; | |
1033 | ||
1034 | switch (c->modrm_reg) { | |
1035 | case 0 ... 1: /* test */ | |
1036 | /* | |
1037 | * Special case in Grp3: test has an immediate | |
1038 | * source operand. | |
1039 | */ | |
1040 | c->src.type = OP_IMM; | |
1041 | c->src.ptr = (unsigned long *)c->eip; | |
1042 | c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
1043 | if (c->src.bytes == 8) | |
1044 | c->src.bytes = 4; | |
1045 | switch (c->src.bytes) { | |
1046 | case 1: | |
1047 | c->src.val = insn_fetch(s8, 1, c->eip); | |
1048 | break; | |
1049 | case 2: | |
1050 | c->src.val = insn_fetch(s16, 2, c->eip); | |
1051 | break; | |
1052 | case 4: | |
1053 | c->src.val = insn_fetch(s32, 4, c->eip); | |
1054 | break; | |
1055 | } | |
1056 | emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags); | |
1057 | break; | |
1058 | case 2: /* not */ | |
1059 | c->dst.val = ~c->dst.val; | |
1060 | break; | |
1061 | case 3: /* neg */ | |
1062 | emulate_1op("neg", c->dst, ctxt->eflags); | |
1063 | break; | |
1064 | default: | |
1065 | DPRINTF("Cannot emulate %02x\n", c->b); | |
1066 | rc = X86EMUL_UNHANDLEABLE; | |
1067 | break; | |
1068 | } | |
1069 | done: | |
1070 | return rc; | |
1071 | } | |
1072 | ||
1073 | static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt, | |
1074 | struct x86_emulate_ops *ops) | |
1075 | { | |
1076 | struct decode_cache *c = &ctxt->decode; | |
1077 | int rc; | |
1078 | ||
1079 | switch (c->modrm_reg) { | |
1080 | case 0: /* inc */ | |
1081 | emulate_1op("inc", c->dst, ctxt->eflags); | |
1082 | break; | |
1083 | case 1: /* dec */ | |
1084 | emulate_1op("dec", c->dst, ctxt->eflags); | |
1085 | break; | |
1086 | case 4: /* jmp abs */ | |
1087 | if (c->b == 0xff) | |
1088 | c->eip = c->dst.val; | |
1089 | else { | |
1090 | DPRINTF("Cannot emulate %02x\n", c->b); | |
1091 | return X86EMUL_UNHANDLEABLE; | |
1092 | } | |
1093 | break; | |
1094 | case 6: /* push */ | |
1095 | ||
1096 | /* 64-bit mode: PUSH always pushes a 64-bit operand. */ | |
1097 | ||
1098 | if (ctxt->mode == X86EMUL_MODE_PROT64) { | |
1099 | c->dst.bytes = 8; | |
1100 | rc = ops->read_std((unsigned long)c->dst.ptr, | |
1101 | &c->dst.val, 8, ctxt->vcpu); | |
1102 | if (rc != 0) | |
1103 | return rc; | |
1104 | } | |
1105 | register_address_increment(c->regs[VCPU_REGS_RSP], | |
1106 | -c->dst.bytes); | |
1107 | rc = ops->write_emulated(register_address(ctxt->ss_base, | |
1108 | c->regs[VCPU_REGS_RSP]), &c->dst.val, | |
1109 | c->dst.bytes, ctxt->vcpu); | |
1110 | if (rc != 0) | |
1111 | return rc; | |
1112 | c->dst.type = OP_NONE; | |
1113 | break; | |
1114 | default: | |
1115 | DPRINTF("Cannot emulate %02x\n", c->b); | |
1116 | return X86EMUL_UNHANDLEABLE; | |
1117 | } | |
1118 | return 0; | |
1119 | } | |
1120 | ||
1121 | static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt, | |
1122 | struct x86_emulate_ops *ops, | |
1123 | unsigned long cr2) | |
1124 | { | |
1125 | struct decode_cache *c = &ctxt->decode; | |
1126 | u64 old, new; | |
1127 | int rc; | |
1128 | ||
1129 | rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu); | |
1130 | if (rc != 0) | |
1131 | return rc; | |
1132 | ||
1133 | if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) || | |
1134 | ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) { | |
1135 | ||
1136 | c->regs[VCPU_REGS_RAX] = (u32) (old >> 0); | |
1137 | c->regs[VCPU_REGS_RDX] = (u32) (old >> 32); | |
1138 | ctxt->eflags &= ~EFLG_ZF; | |
1139 | ||
1140 | } else { | |
1141 | new = ((u64)c->regs[VCPU_REGS_RCX] << 32) | | |
1142 | (u32) c->regs[VCPU_REGS_RBX]; | |
1143 | ||
1144 | rc = ops->cmpxchg_emulated(cr2, &old, &new, 8, ctxt->vcpu); | |
1145 | if (rc != 0) | |
1146 | return rc; | |
1147 | ctxt->eflags |= EFLG_ZF; | |
1148 | } | |
1149 | return 0; | |
1150 | } | |
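/*
 * Plain-C sketch (illustrative only) of the CMPXCHG8B semantics that
 * emulate_grp9() implements above: compare EDX:EAX with the 64-bit
 * memory operand; on a match store ECX:EBX and set ZF, otherwise load
 * the old value into EDX:EAX and clear ZF.
 */
static inline int example_cmpxchg8b(u64 *mem, u32 *eax, u32 *edx,
				    u32 ebx, u32 ecx)
{
	u64 expected = ((u64)*edx << 32) | *eax;

	if (*mem == expected) {
		*mem = ((u64)ecx << 32) | ebx;
		return 1;	/* ZF set */
	}
	*eax = (u32)*mem;
	*edx = (u32)(*mem >> 32);
	return 0;		/* ZF clear */
}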
1151 | ||
1152 | static inline int writeback(struct x86_emulate_ctxt *ctxt, | |
1153 | struct x86_emulate_ops *ops) | |
1154 | { | |
1155 | int rc; | |
1156 | struct decode_cache *c = &ctxt->decode; | |
1157 | ||
1158 | switch (c->dst.type) { | |
1159 | case OP_REG: | |
1160 | /* The 4-byte case *is* correct: | |
1161 | * in 64-bit mode we zero-extend. | |
1162 | */ | |
1163 | switch (c->dst.bytes) { | |
1164 | case 1: | |
1165 | *(u8 *)c->dst.ptr = (u8)c->dst.val; | |
1166 | break; | |
1167 | case 2: | |
1168 | *(u16 *)c->dst.ptr = (u16)c->dst.val; | |
1169 | break; | |
1170 | case 4: | |
1171 | *c->dst.ptr = (u32)c->dst.val; | |
1172 | break; /* 64b: zero-ext */ | |
1173 | case 8: | |
1174 | *c->dst.ptr = c->dst.val; | |
1175 | break; | |
1176 | } | |
1177 | break; | |
1178 | case OP_MEM: | |
1179 | if (c->lock_prefix) | |
1180 | rc = ops->cmpxchg_emulated( | |
1181 | (unsigned long)c->dst.ptr, | |
1182 | &c->dst.orig_val, | |
1183 | &c->dst.val, | |
1184 | c->dst.bytes, | |
1185 | ctxt->vcpu); | |
1186 | else | |
1187 | rc = ops->write_emulated( | |
1188 | (unsigned long)c->dst.ptr, | |
1189 | &c->dst.val, | |
1190 | c->dst.bytes, | |
1191 | ctxt->vcpu); | |
1192 | if (rc != 0) | |
1193 | return rc; | |
1194 | break; | |
1195 | case OP_NONE: | |
1196 | /* no writeback */ | |
1197 | break; | |
1198 | default: | |
1199 | break; | |
1200 | } | |
1201 | return 0; | |
1202 | } | |
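/*
 * Sketch of the 32-bit writeback rule noted above (illustrative helper):
 * assigning a u32 value through the full-width register pointer clears
 * the upper 32 bits, matching how 32-bit destinations behave in 64-bit
 * mode.
 */
static inline void example_writeback32(unsigned long *reg, u32 val)
{
	*reg = val;	/* upper 32 bits become zero */
}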
1203 | ||
1204 | int | |
1205 | x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops) | |
1206 | { | |
1207 | unsigned long cr2 = ctxt->cr2; | |
1208 | u64 msr_data; | |
1209 | unsigned long saved_eip = 0; | |
1210 | struct decode_cache *c = &ctxt->decode; | |
1211 | int rc = 0; | |
1212 | ||
1213 | /* Shadow copy of register state. Committed on successful emulation. | |
1214 | * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't | |
1215 | * modify them. | |
1216 | */ | |
1217 | ||
1218 | memcpy(c->regs, ctxt->vcpu->regs, sizeof c->regs); | |
1219 | saved_eip = c->eip; | |
1220 | ||
1221 | if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs)) | |
1222 | cr2 = c->modrm_ea; | |
1223 | ||
1224 | if (c->src.type == OP_MEM) { | |
1225 | c->src.ptr = (unsigned long *)cr2; | |
1226 | c->src.val = 0; | |
1227 | rc = ops->read_emulated((unsigned long)c->src.ptr, | |
1228 | &c->src.val, | |
1229 | c->src.bytes, | |
1230 | ctxt->vcpu); | |
1231 | if (rc != 0) | |
1232 | goto done; | |
1233 | c->src.orig_val = c->src.val; | |
1234 | } | |
1235 | ||
1236 | if ((c->d & DstMask) == ImplicitOps) | |
1237 | goto special_insn; | |
1238 | ||
1239 | ||
1240 | if (c->dst.type == OP_MEM) { | |
1241 | c->dst.ptr = (unsigned long *)cr2; | |
1242 | c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
1243 | c->dst.val = 0; | |
1244 | if (c->d & BitOp) { | |
1245 | unsigned long mask = ~(c->dst.bytes * 8 - 1); | |
1246 | ||
1247 | c->dst.ptr = (void *)c->dst.ptr + | |
1248 | (c->src.val & mask) / 8; | |
1249 | } | |
1250 | if (!(c->d & Mov) && | |
1251 | /* optimisation - avoid slow emulated read */ | |
1252 | ((rc = ops->read_emulated((unsigned long)c->dst.ptr, | |
1253 | &c->dst.val, | |
1254 | c->dst.bytes, ctxt->vcpu)) != 0)) | |
1255 | goto done; | |
1256 | } | |
1257 | c->dst.orig_val = c->dst.val; | |
1258 | ||
1259 | if (c->twobyte) | |
1260 | goto twobyte_insn; | |
1261 | ||
1262 | switch (c->b) { | |
1263 | case 0x00 ... 0x05: | |
1264 | add: /* add */ | |
1265 | emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags); | |
1266 | break; | |
1267 | case 0x08 ... 0x0d: | |
1268 | or: /* or */ | |
1269 | emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags); | |
1270 | break; | |
1271 | case 0x10 ... 0x15: | |
1272 | adc: /* adc */ | |
1273 | emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags); | |
1274 | break; | |
1275 | case 0x18 ... 0x1d: | |
1276 | sbb: /* sbb */ | |
1277 | emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags); | |
1278 | break; | |
1279 | case 0x20 ... 0x23: | |
1280 | and: /* and */ | |
1281 | emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags); | |
1282 | break; | |
1283 | case 0x24: /* and al imm8 */ | |
1284 | c->dst.type = OP_REG; | |
1285 | c->dst.ptr = &c->regs[VCPU_REGS_RAX]; | |
1286 | c->dst.val = *(u8 *)c->dst.ptr; | |
1287 | c->dst.bytes = 1; | |
1288 | c->dst.orig_val = c->dst.val; | |
1289 | goto and; | |
1290 | case 0x25: /* and ax,imm16 / and eax,imm32 */ | |
1291 | c->dst.type = OP_REG; | |
1292 | c->dst.bytes = c->op_bytes; | |
1293 | c->dst.ptr = &c->regs[VCPU_REGS_RAX]; | |
1294 | if (c->op_bytes == 2) | |
1295 | c->dst.val = *(u16 *)c->dst.ptr; | |
1296 | else | |
1297 | c->dst.val = *(u32 *)c->dst.ptr; | |
1298 | c->dst.orig_val = c->dst.val; | |
1299 | goto and; | |
1300 | case 0x28 ... 0x2d: | |
1301 | sub: /* sub */ | |
1302 | emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags); | |
1303 | break; | |
1304 | case 0x30 ... 0x35: | |
1305 | xor: /* xor */ | |
1306 | emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags); | |
1307 | break; | |
1308 | case 0x38 ... 0x3d: | |
1309 | cmp: /* cmp */ | |
1310 | emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags); | |
1311 | break; | |
1312 | case 0x40 ... 0x47: /* inc r16/r32 */ | |
1313 | emulate_1op("inc", c->dst, ctxt->eflags); | |
1314 | break; | |
1315 | case 0x48 ... 0x4f: /* dec r16/r32 */ | |
1316 | emulate_1op("dec", c->dst, ctxt->eflags); | |
1317 | break; | |
1318 | case 0x50 ... 0x57: /* push reg */ | |
1319 | c->dst.type = OP_MEM; | |
1320 | c->dst.bytes = c->op_bytes; | |
1321 | c->dst.val = c->src.val; | |
1322 | register_address_increment(c->regs[VCPU_REGS_RSP], | |
1323 | -c->op_bytes); | |
1324 | c->dst.ptr = (void *) register_address( | |
1325 | ctxt->ss_base, c->regs[VCPU_REGS_RSP]); | |
1326 | break; | |
1327 | case 0x58 ... 0x5f: /* pop reg */ | |
1328 | pop_instruction: | |
1329 | if ((rc = ops->read_std(register_address(ctxt->ss_base, | |
1330 | c->regs[VCPU_REGS_RSP]), c->dst.ptr, | |
1331 | c->op_bytes, ctxt->vcpu)) != 0) | |
1332 | goto done; | |
1333 | ||
1334 | register_address_increment(c->regs[VCPU_REGS_RSP], | |
1335 | c->op_bytes); | |
1336 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1337 | break; | |
1338 | case 0x63: /* movsxd */ | |
1339 | if (ctxt->mode != X86EMUL_MODE_PROT64) | |
1340 | goto cannot_emulate; | |
1341 | c->dst.val = (s32) c->src.val; | |
1342 | break; | |
1343 | case 0x80 ... 0x83: /* Grp1 */ | |
1344 | switch (c->modrm_reg) { | |
1345 | case 0: | |
1346 | goto add; | |
1347 | case 1: | |
1348 | goto or; | |
1349 | case 2: | |
1350 | goto adc; | |
1351 | case 3: | |
1352 | goto sbb; | |
1353 | case 4: | |
1354 | goto and; | |
1355 | case 5: | |
1356 | goto sub; | |
1357 | case 6: | |
1358 | goto xor; | |
1359 | case 7: | |
1360 | goto cmp; | |
1361 | } | |
1362 | break; | |
1363 | case 0x84 ... 0x85: | |
1364 | emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags); | |
1365 | break; | |
1366 | case 0x86 ... 0x87: /* xchg */ | |
1367 | /* Write back the register source. */ | |
1368 | switch (c->dst.bytes) { | |
1369 | case 1: | |
1370 | *(u8 *) c->src.ptr = (u8) c->dst.val; | |
1371 | break; | |
1372 | case 2: | |
1373 | *(u16 *) c->src.ptr = (u16) c->dst.val; | |
1374 | break; | |
1375 | case 4: | |
1376 | *c->src.ptr = (u32) c->dst.val; | |
1377 | break; /* 64b reg: zero-extend */ | |
1378 | case 8: | |
1379 | *c->src.ptr = c->dst.val; | |
1380 | break; | |
1381 | } | |
1382 | /* | |
1383 | * Write back the memory destination with implicit LOCK | |
1384 | * prefix. | |
1385 | */ | |
1386 | c->dst.val = c->src.val; | |
1387 | c->lock_prefix = 1; | |
1388 | break; | |
1389 | case 0x88 ... 0x8b: /* mov */ | |
1390 | goto mov; | |
1391 | case 0x8d: /* lea r16/r32, m */ | |
1392 | c->dst.val = c->modrm_val; | |
1393 | break; | |
1394 | case 0x8f: /* pop (sole member of Grp1a) */ | |
1395 | rc = emulate_grp1a(ctxt, ops); | |
1396 | if (rc != 0) | |
1397 | goto done; | |
1398 | break; | |
1399 | case 0xa0 ... 0xa1: /* mov */ | |
1400 | c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX]; | |
1401 | c->dst.val = c->src.val; | |
1402 | break; | |
1403 | case 0xa2 ... 0xa3: /* mov */ | |
1404 | c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX]; | |
1405 | break; | |
1406 | case 0xc0 ... 0xc1: | |
1407 | emulate_grp2(ctxt); | |
1408 | break; | |
1409 | case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */ | |
1410 | mov: | |
1411 | c->dst.val = c->src.val; | |
1412 | break; | |
1413 | case 0xd0 ... 0xd1: /* Grp2 */ | |
1414 | c->src.val = 1; | |
1415 | emulate_grp2(ctxt); | |
1416 | break; | |
1417 | case 0xd2 ... 0xd3: /* Grp2 */ | |
1418 | c->src.val = c->regs[VCPU_REGS_RCX]; | |
1419 | emulate_grp2(ctxt); | |
1420 | break; | |
1421 | case 0xf6 ... 0xf7: /* Grp3 */ | |
1422 | rc = emulate_grp3(ctxt, ops); | |
1423 | if (rc != 0) | |
1424 | goto done; | |
1425 | break; | |
1426 | case 0xfe ... 0xff: /* Grp4/Grp5 */ | |
1427 | rc = emulate_grp45(ctxt, ops); | |
1428 | if (rc != 0) | |
1429 | goto done; | |
1430 | break; | |
1431 | } | |
1432 | ||
1433 | writeback: | |
1434 | rc = writeback(ctxt, ops); | |
1435 | if (rc != 0) | |
1436 | goto done; | |
1437 | ||
1438 | /* Commit shadow register state. */ | |
1439 | memcpy(ctxt->vcpu->regs, c->regs, sizeof c->regs); | |
1440 | ctxt->vcpu->rip = c->eip; | |
1441 | ||
1442 | done: | |
1443 | if (rc == X86EMUL_UNHANDLEABLE) { | |
1444 | c->eip = saved_eip; | |
1445 | return -1; | |
1446 | } | |
1447 | return 0; | |
1448 | ||
1449 | special_insn: | |
1450 | if (c->twobyte) | |
1451 | goto twobyte_special_insn; | |
1452 | switch (c->b) { | |
1453 | case 0x6a: /* push imm8 */ | |
1454 | c->src.val = 0L; | |
1455 | c->src.val = insn_fetch(s8, 1, c->eip); | |
1456 | emulate_push(ctxt); | |
1457 | break; | |
1458 | case 0x6c: /* insb */ | |
1459 | case 0x6d: /* insw/insd */ | |
1460 | if (kvm_emulate_pio_string(ctxt->vcpu, NULL, | |
1461 | 1, | |
1462 | (c->d & ByteOp) ? 1 : c->op_bytes, | |
1463 | c->rep_prefix ? | |
1464 | address_mask(c->regs[VCPU_REGS_RCX]) : 1, | |
1465 | (ctxt->eflags & EFLG_DF), | |
1466 | register_address(ctxt->es_base, | |
1467 | c->regs[VCPU_REGS_RDI]), | |
1468 | c->rep_prefix, | |
1469 | c->regs[VCPU_REGS_RDX]) == 0) { | |
1470 | c->eip = saved_eip; | |
1471 | return -1; | |
1472 | } | |
1473 | return 0; | |
1474 | case 0x6e: /* outsb */ | |
1475 | case 0x6f: /* outsw/outsd */ | |
1476 | if (kvm_emulate_pio_string(ctxt->vcpu, NULL, | |
1477 | 0, | |
1478 | (c->d & ByteOp) ? 1 : c->op_bytes, | |
1479 | c->rep_prefix ? | |
1480 | address_mask(c->regs[VCPU_REGS_RCX]) : 1, | |
1481 | (ctxt->eflags & EFLG_DF), | |
1482 | register_address(c->override_base ? | |
1483 | *c->override_base : | |
1484 | ctxt->ds_base, | |
1485 | c->regs[VCPU_REGS_RSI]), | |
1486 | c->rep_prefix, | |
1487 | c->regs[VCPU_REGS_RDX]) == 0) { | |
1488 | c->eip = saved_eip; | |
1489 | return -1; | |
1490 | } | |
1491 | return 0; | |
1492 | case 0x70 ... 0x7f: /* jcc (short) */ { | |
1493 | int rel = insn_fetch(s8, 1, c->eip); | |
1494 | ||
1495 | if (test_cc(c->b, ctxt->eflags)) | |
1496 | JMP_REL(rel); | |
1497 | break; | |
1498 | } | |
1499 | case 0x9c: /* pushf */ | |
1500 | c->src.val = (unsigned long) ctxt->eflags; | |
1501 | emulate_push(ctxt); | |
1502 | break; | |
1503 | case 0x9d: /* popf */ | |
1504 | c->dst.ptr = (unsigned long *) &ctxt->eflags; | |
1505 | goto pop_instruction; | |
1506 | case 0xc3: /* ret */ | |
1507 | c->dst.ptr = &c->eip; | |
1508 | goto pop_instruction; | |
1509 | case 0xf4: /* hlt */ | |
1510 | ctxt->vcpu->halt_request = 1; | |
1511 | goto done; | |
1512 | case 0xf5: /* cmc */ | |
1513 | /* complement carry flag from eflags reg */ | |
1514 | ctxt->eflags ^= EFLG_CF; | |
1515 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1516 | break; | |
1517 | case 0xf8: /* clc */ | |
1518 | ctxt->eflags &= ~EFLG_CF; | |
1519 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1520 | break; | |
1521 | case 0xfa: /* cli */ | |
1522 | ctxt->eflags &= ~X86_EFLAGS_IF; | |
1523 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1524 | break; | |
1525 | case 0xfb: /* sti */ | |
1526 | ctxt->eflags |= X86_EFLAGS_IF; | |
1527 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1528 | break; | |
1529 | } | |
1530 | if (c->rep_prefix) { | |
1531 | if (c->regs[VCPU_REGS_RCX] == 0) { | |
1532 | ctxt->vcpu->rip = c->eip; | |
1533 | goto done; | |
1534 | } | |
1535 | c->regs[VCPU_REGS_RCX]--; | |
1536 | c->eip = ctxt->vcpu->rip; | |
1537 | } | |
1538 | switch (c->b) { | |
1539 | case 0xa4 ... 0xa5: /* movs */ | |
1540 | c->dst.type = OP_MEM; | |
1541 | c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
1542 | c->dst.ptr = (unsigned long *)register_address( | |
1543 | ctxt->es_base, | |
1544 | c->regs[VCPU_REGS_RDI]); | |
1545 | if ((rc = ops->read_emulated(register_address( | |
1546 | c->override_base ? *c->override_base : | |
1547 | ctxt->ds_base, | |
1548 | c->regs[VCPU_REGS_RSI]), | |
1549 | &c->dst.val, | |
1550 | c->dst.bytes, ctxt->vcpu)) != 0) | |
1551 | goto done; | |
1552 | register_address_increment(c->regs[VCPU_REGS_RSI], | |
1553 | (ctxt->eflags & EFLG_DF) ? -c->dst.bytes | |
1554 | : c->dst.bytes); | |
1555 | register_address_increment(c->regs[VCPU_REGS_RDI], | |
1556 | (ctxt->eflags & EFLG_DF) ? -c->dst.bytes | |
1557 | : c->dst.bytes); | |
1558 | break; | |
1559 | case 0xa6 ... 0xa7: /* cmps */ | |
1560 | DPRINTF("Urk! I don't handle CMPS.\n"); | |
1561 | goto cannot_emulate; | |
1562 | case 0xaa ... 0xab: /* stos */ | |
1563 | c->dst.type = OP_MEM; | |
1564 | c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
1565 | c->dst.ptr = (unsigned long *)register_address( | |
1566 | ctxt->es_base, | |
1567 | c->regs[VCPU_REGS_RDI]); | |
1568 | c->dst.val = c->regs[VCPU_REGS_RAX]; | |
1569 | register_address_increment(c->regs[VCPU_REGS_RDI], | |
1570 | (ctxt->eflags & EFLG_DF) ? -c->dst.bytes | |
1571 | : c->dst.bytes); | |
1572 | break; | |
1573 | case 0xac ... 0xad: /* lods */ | |
1574 | c->dst.type = OP_REG; | |
1575 | c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes; | |
1576 | c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX]; | |
1577 | if ((rc = ops->read_emulated(register_address( | |
1578 | c->override_base ? *c->override_base : | |
1579 | ctxt->ds_base, | |
1580 | c->regs[VCPU_REGS_RSI]), | |
1581 | &c->dst.val, | |
1582 | c->dst.bytes, | |
1583 | ctxt->vcpu)) != 0) | |
1584 | goto done; | |
1585 | register_address_increment(c->regs[VCPU_REGS_RSI], | |
1586 | (ctxt->eflags & EFLG_DF) ? -c->dst.bytes | |
1587 | : c->dst.bytes); | |
1588 | break; | |
1589 | case 0xae ... 0xaf: /* scas */ | |
1590 | DPRINTF("Urk! I don't handle SCAS.\n"); | |
1591 | goto cannot_emulate; | |
1592 | case 0xe8: /* call (near) */ { | |
1593 | long int rel; | |
1594 | switch (c->op_bytes) { | |
1595 | case 2: | |
1596 | rel = insn_fetch(s16, 2, c->eip); | |
1597 | break; | |
1598 | case 4: | |
1599 | rel = insn_fetch(s32, 4, c->eip); | |
1600 | break; | |
1601 | default: | |
1602 | DPRINTF("Call: Invalid op_bytes\n"); | |
1603 | goto cannot_emulate; | |
1604 | } | |
1605 | c->src.val = (unsigned long) c->eip; | |
1606 | JMP_REL(rel); | |
1607 | c->op_bytes = c->ad_bytes; | |
1608 | emulate_push(ctxt); | |
1609 | break; | |
1610 | } | |
1611 | case 0xe9: /* jmp rel */ | |
1612 | case 0xeb: /* jmp rel short */ | |
1613 | JMP_REL(c->src.val); | |
1614 | c->dst.type = OP_NONE; /* Disable writeback. */ | |
1615 | break; | |
1616 | ||
1617 | ||
1618 | } | |
1619 | goto writeback; | |
1620 | ||
1621 | twobyte_insn: | |
1622 | switch (c->b) { | |
1623 | case 0x01: /* lgdt, lidt, lmsw */ | |
1624 | switch (c->modrm_reg) { | |
1625 | u16 size; | |
1626 | unsigned long address; | |
1627 | ||
1628 | case 0: /* vmcall */ | |
1629 | if (c->modrm_mod != 3 || c->modrm_rm != 1) | |
1630 | goto cannot_emulate; | |
1631 | ||
1632 | rc = kvm_fix_hypercall(ctxt->vcpu); | |
1633 | if (rc) | |
1634 | goto done; | |
1635 | ||
1636 | kvm_emulate_hypercall(ctxt->vcpu); | |
1637 | break; | |
1638 | case 2: /* lgdt */ | |
1639 | rc = read_descriptor(ctxt, ops, c->src.ptr, | |
1640 | &size, &address, c->op_bytes); | |
1641 | if (rc) | |
1642 | goto done; | |
1643 | realmode_lgdt(ctxt->vcpu, size, address); | |
1644 | break; | |
1645 | case 3: /* lidt/vmmcall */ | |
1646 | if (c->modrm_mod == 3 && c->modrm_rm == 1) { | |
1647 | rc = kvm_fix_hypercall(ctxt->vcpu); | |
1648 | if (rc) | |
1649 | goto done; | |
1650 | kvm_emulate_hypercall(ctxt->vcpu); | |
1651 | } else { | |
1652 | rc = read_descriptor(ctxt, ops, c->src.ptr, | |
1653 | &size, &address, | |
1654 | c->op_bytes); | |
1655 | if (rc) | |
1656 | goto done; | |
1657 | realmode_lidt(ctxt->vcpu, size, address); | |
1658 | } | |
1659 | break; | |
1660 | case 4: /* smsw */ | |
1661 | if (c->modrm_mod != 3) | |
1662 | goto cannot_emulate; | |
1663 | *(u16 *)&c->regs[c->modrm_rm] | |
1664 | = realmode_get_cr(ctxt->vcpu, 0); | |
1665 | break; | |
1666 | case 6: /* lmsw */ | |
1667 | if (c->modrm_mod != 3) | |
1668 | goto cannot_emulate; | |
1669 | realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val, | |
1670 | &ctxt->eflags); | |
1671 | break; | |
1672 | case 7: /* invlpg */ | |
1673 | emulate_invlpg(ctxt->vcpu, cr2); | |
1674 | break; | |
1675 | default: | |
1676 | goto cannot_emulate; | |
1677 | } | |
1678 | /* Disable writeback. */ | |
1679 | c->dst.type = OP_NONE; | |
1680 | break; | |
1681 | case 0x21: /* mov from dr to reg */ | |
1682 | if (c->modrm_mod != 3) | |
1683 | goto cannot_emulate; | |
1684 | rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]); | |
1685 | if (rc) | |
1686 | goto cannot_emulate; | |
1687 | c->dst.type = OP_NONE; /* no writeback */ | |
1688 | break; | |
1689 | case 0x23: /* mov from reg to dr */ | |
1690 | if (c->modrm_mod != 3) | |
1691 | goto cannot_emulate; | |
1692 | rc = emulator_set_dr(ctxt, c->modrm_reg, | |
1693 | c->regs[c->modrm_rm]); | |
1694 | if (rc) | |
1695 | goto cannot_emulate; | |
1696 | c->dst.type = OP_NONE; /* no writeback */ | |
1697 | break; | |
1698 | case 0x40 ... 0x4f: /* cmov */ | |
1699 | c->dst.val = c->dst.orig_val = c->src.val; | |
1700 | if (!test_cc(c->b, ctxt->eflags)) | |
1701 | c->dst.type = OP_NONE; /* no writeback */ | |
1702 | break; | |
1703 | case 0xa3: | |
1704 | bt: /* bt */ | |
1705 | c->dst.type = OP_NONE; | |
1706 | /* only subword offset */ | |
1707 | c->src.val &= (c->dst.bytes << 3) - 1; | |
1708 | emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags); | |
1709 | break; | |
1710 | case 0xab: | |
1711 | bts: /* bts */ | |
1712 | /* only subword offset */ | |
1713 | c->src.val &= (c->dst.bytes << 3) - 1; | |
1714 | emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags); | |
1715 | break; | |
1716 | case 0xb0 ... 0xb1: /* cmpxchg */ | |
1717 | /* | |
1718 | * Save real source value, then compare EAX against | |
1719 | * destination. | |
1720 | */ | |
1721 | c->src.orig_val = c->src.val; | |
1722 | c->src.val = c->regs[VCPU_REGS_RAX]; | |
1723 | emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags); | |
1724 | if (ctxt->eflags & EFLG_ZF) { | |
1725 | /* Success: write back to memory. */ | |
1726 | c->dst.val = c->src.orig_val; | |
1727 | } else { | |
1728 | /* Failure: write the value we saw to EAX. */ | |
1729 | c->dst.type = OP_REG; | |
1730 | c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX]; | |
1731 | } | |
1732 | break; | |
1733 | case 0xb3: | |
1734 | btr: /* btr */ | |
1735 | /* only subword offset */ | |
1736 | c->src.val &= (c->dst.bytes << 3) - 1; | |
1737 | emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags); | |
1738 | break; | |
1739 | case 0xb6 ... 0xb7: /* movzx */ | |
1740 | c->dst.bytes = c->op_bytes; | |
1741 | c->dst.val = (c->d & ByteOp) ? (u8) c->src.val | |
1742 | : (u16) c->src.val; | |
1743 | break; | |
1744 | case 0xba: /* Grp8 */ | |
1745 | switch (c->modrm_reg & 3) { | |
1746 | case 0: | |
1747 | goto bt; | |
1748 | case 1: | |
1749 | goto bts; | |
1750 | case 2: | |
1751 | goto btr; | |
1752 | case 3: | |
1753 | goto btc; | |
1754 | } | |
1755 | break; | |
1756 | case 0xbb: | |
1757 | btc: /* btc */ | |
1758 | /* only subword offset */ | |
1759 | c->src.val &= (c->dst.bytes << 3) - 1; | |
1760 | emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags); | |
1761 | break; | |
1762 | case 0xbe ... 0xbf: /* movsx */ | |
1763 | c->dst.bytes = c->op_bytes; | |
1764 | c->dst.val = (c->d & ByteOp) ? (s8) c->src.val : | |
1765 | (s16) c->src.val; | |
1766 | break; | |
1767 | case 0xc3: /* movnti */ | |
1768 | c->dst.bytes = c->op_bytes; | |
1769 | c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val : | |
1770 | (u64) c->src.val; | |
1771 | break; | |
1772 | } | |
1773 | goto writeback; | |
1774 | ||
1775 | twobyte_special_insn: | |
1776 | switch (c->b) { | |
1777 | case 0x06: | |
1778 | emulate_clts(ctxt->vcpu); | |
1779 | break; | |
1780 | case 0x08: /* invd */ | |
1781 | break; | |
1782 | case 0x09: /* wbinvd */ | |
1783 | break; | |
1784 | case 0x0d: /* GrpP (prefetch) */ | |
1785 | case 0x18: /* Grp16 (prefetch/nop) */ | |
1786 | break; | |
1787 | case 0x20: /* mov cr, reg */ | |
1788 | if (c->modrm_mod != 3) | |
1789 | goto cannot_emulate; | |
1790 | c->regs[c->modrm_rm] = | |
1791 | realmode_get_cr(ctxt->vcpu, c->modrm_reg); | |
1792 | break; | |
1793 | case 0x22: /* mov reg, cr */ | |
1794 | if (c->modrm_mod != 3) | |
1795 | goto cannot_emulate; | |
1796 | realmode_set_cr(ctxt->vcpu, | |
1797 | c->modrm_reg, c->modrm_val, &ctxt->eflags); | |
1798 | break; | |
1799 | case 0x30: | |
1800 | /* wrmsr */ | |
1801 | msr_data = (u32)c->regs[VCPU_REGS_RAX] | |
1802 | | ((u64)c->regs[VCPU_REGS_RDX] << 32); | |
1803 | rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data); | |
1804 | if (rc) { | |
1805 | kvm_x86_ops->inject_gp(ctxt->vcpu, 0); | |
1806 | c->eip = ctxt->vcpu->rip; | |
1807 | } | |
1808 | rc = X86EMUL_CONTINUE; | |
1809 | break; | |
1810 | case 0x32: | |
1811 | /* rdmsr */ | |
1812 | rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data); | |
1813 | if (rc) { | |
1814 | kvm_x86_ops->inject_gp(ctxt->vcpu, 0); | |
1815 | c->eip = ctxt->vcpu->rip; | |
1816 | } else { | |
1817 | c->regs[VCPU_REGS_RAX] = (u32)msr_data; | |
1818 | c->regs[VCPU_REGS_RDX] = msr_data >> 32; | |
1819 | } | |
1820 | rc = X86EMUL_CONTINUE; | |
1821 | break; | |
1822 | case 0x80 ... 0x8f: /* jnz rel, etc*/ { | |
1823 | long int rel; | |
1824 | ||
1825 | switch (c->op_bytes) { | |
1826 | case 2: | |
1827 | rel = insn_fetch(s16, 2, c->eip); | |
1828 | break; | |
1829 | case 4: | |
1830 | rel = insn_fetch(s32, 4, c->eip); | |
1831 | break; | |
1832 | case 8: | |
1833 | rel = insn_fetch(s64, 8, c->eip); | |
1834 | break; | |
1835 | default: | |
1836 | DPRINTF("jnz: Invalid op_bytes\n"); | |
1837 | goto cannot_emulate; | |
1838 | } | |
1839 | if (test_cc(c->b, ctxt->eflags)) | |
1840 | JMP_REL(rel); | |
1841 | break; | |
1842 | } | |
1843 | case 0xc7: /* Grp9 (cmpxchg8b) */ | |
1844 | rc = emulate_grp9(ctxt, ops, cr2); | |
1845 | if (rc != 0) | |
1846 | goto done; | |
1847 | break; | |
1848 | } | |
1849 | /* Disable writeback. */ | |
1850 | c->dst.type = OP_NONE; | |
1851 | goto writeback; | |
1852 | ||
1853 | cannot_emulate: | |
1854 | DPRINTF("Cannot emulate %02x\n", c->b); | |
1855 | c->eip = saved_eip; | |
1856 | return -1; | |
1857 | } |