/*
 * i386 virtual CPU header
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/* eflags bit masks (x86 EFLAGS register layout) */
#define TRAP_FLAG       0x0100  /* TF: single-step trap */
#define INTERRUPT_FLAG  0x0200  /* IF: maskable interrupts enabled */
#define DIRECTION_FLAG  0x0400  /* DF: string ops decrement when set */
#define IOPL_FLAG_MASK  0x3000  /* IOPL: 2-bit I/O privilege level */
#define NESTED_FLAG     0x4000  /* NT: nested task */
#define BYTE_FL         0x8000  /* Intel reserved! */
#define RF_FLAG         0x10000 /* RF: resume flag */
#define VM_FLAG         0x20000 /* VM: virtual-8086 mode */
/* x86 exception codes. Judging by the names (EXCP05_BOUND == 6,
   EXCP0E_PAGE == 15, ...), each value is the hardware vector number
   plus one — presumably so that 0 can mean "no exception"; confirm
   against the emulator core before relying on this. */
#define EXCP05_BOUND    6   /* BOUND range exceeded */
#define EXCP06_ILLOP    7   /* invalid opcode (#UD) */
#define EXCP09_XERR     10  /* coprocessor segment overrun */
#define EXCP0B_NOSEG    12  /* segment not present (#NP) */
#define EXCP0C_STACK    13  /* stack fault (#SS) */
#define EXCP0E_PAGE     15  /* page fault (#PF) */
#define EXCP10_COPR     17  /* x87 FPU error (#MF) */
#define EXCP11_ALGN     18  /* alignment check (#AC) */
#define EXCP12_MCHK     19  /* machine check (#MC) */

#define EXCP_INTERRUPT  256 /* async interruption */
/* Lazy condition-code evaluation ops: instead of computing eflags after
   every instruction, the emulator records which operation last set the
   flags (cc_op) plus its operands (CC_SRC/CC_DST) and materializes the
   flags on demand.
   NOTE(review): extraction appears to have dropped the `typedef enum {`
   opener, the W/L (16/32-bit) width variants that normally follow each
   ...B entry, and the closing `} CC_OP;` — confirm against the original
   file before editing. */
90 CC_OP_DYNAMIC
, /* must use dynamic code to get cc_op */
91 CC_OP_EFLAGS
, /* all cc are explicitly computed, CC_SRC = flags */
92 CC_OP_MUL
, /* modify all flags, C, O = (CC_SRC != 0) */
94 CC_OP_ADDB
, /* modify all flags, CC_DST = res, CC_SRC = src1 */
98 CC_OP_ADCB
, /* modify all flags, CC_DST = res, CC_SRC = src1 */
102 CC_OP_SUBB
, /* modify all flags, CC_DST = res, CC_SRC = src1 */
106 CC_OP_SBBB
, /* modify all flags, CC_DST = res, CC_SRC = src1 */
110 CC_OP_LOGICB
, /* modify all flags, CC_DST = res */
114 CC_OP_INCB
, /* modify all flags except, CC_DST = res, CC_SRC = C */
118 CC_OP_DECB
, /* modify all flags except, CC_DST = res, CC_SRC = C */
122 CC_OP_SHLB
, /* modify all flags, CC_DST = res, CC_SRC.lsb = C */
126 CC_OP_SARB
, /* modify all flags, CC_DST = res, CC_SRC.lsb = C */
/* Host representation of the x86 80-bit extended-precision FP type:
   `long double` when USE_X86LDOUBLE is defined, plain `double` otherwise
   (loses precision but is portable).
   NOTE(review): the `#else` and `#endif` lines of this conditional appear
   to have been lost in extraction — both typedefs are visible here. */
134 #define USE_X86LDOUBLE
137 #ifdef USE_X86LDOUBLE
138 typedef long double CPU86_LDouble
;
/* fallback branch when USE_X86LDOUBLE is not defined (enclosing #else elided?) */
140 typedef double CPU86_LDouble
;
/* Cached per-segment-register state taken from the descriptor tables.
   NOTE(review): the field lines of SegmentCache (base/limit/flags,
   presumably) appear to have been dropped by extraction. */
143 typedef struct SegmentCache
{
/* A descriptor table register (GDT/LDT/IDT). Its base/limit fields also
   appear to be elided here — only emu_base is visible. */
149 typedef struct SegmentDescriptorTable
{
152 /* this is the returned base when reading the register, just to
153 avoid that the emulated program modifies it */
154 unsigned long emu_base
;
155 } SegmentDescriptorTable
;
/* Complete emulated x86 CPU state.
   NOTE(review): extraction appears to have dropped most field lines
   (general-purpose registers, eip/eflags pieces, FPU control words,
   exception bookkeeping, and the closing `} CPUX86State;`) — only a
   subset of the struct is visible below. */
157 typedef struct CPUX86State
{
158 /* standard registers */
163 /* emulator internal eflags handling */
167 int32_t df
; /* D flag : 1 if D = 0, -1 if D = 1 */
/* x87 FPU state */
170 unsigned int fpstt
; /* top of stack index */
173 uint8_t fptags
[8]; /* 0 = valid, 1 = empty */
174 CPU86_LDouble fpregs
[8];
176 /* emulator internal variables */
/* segmentation state */
180 uint32_t segs
[6]; /* selector values */
181 SegmentCache seg_cache
[6]; /* info taken from LDT/GDT */
182 SegmentDescriptorTable gdt
;
183 SegmentDescriptorTable ldt
;
184 SegmentDescriptorTable idt
;
186 /* various CPU modes */
189 /* exception/interrupt handling */
192 int interrupt_request
;
/* all CPU memory access use these macros */

/* Load an unsigned 8-bit value from host pointer 'ptr'.
   Returned widened to int (0..255). */
static inline int ldub(void *ptr)
{
    return *(uint8_t *)ptr;
}
/* Load a signed 8-bit value from host pointer 'ptr'.
   Returned sign-extended to int (-128..127). */
static inline int ldsb(void *ptr)
{
    return *(int8_t *)ptr;
}
/* Store the low 8 bits of 'v' at host pointer 'ptr'.
   NOTE(review): the body was dropped by extraction; restored as the
   single-byte store mirroring ldub() — confirm against the original. */
static inline void stb(void *ptr, int v)
{
    *(uint8_t *)ptr = v;
}
/* Big-endian host versions of the 16/32/64-bit and float accessors.
   Guest (x86) data is little-endian, so every access byte-swaps.
   NOTE(review): extraction has mangled this whole branch — the inner
   `#if defined(__powerpc__)` / `#else` / `#endif` pairs, local variable
   declarations, braces, and several function bodies are missing. The
   pattern visible below (a PowerPC byte-reversed-load asm line followed
   by an explicit byte-assembly expression) suggests each function had an
   asm fast path and a portable fallback. Do not edit without the
   original file. */
211 #ifdef WORDS_BIGENDIAN
213 /* conservative code for little endian unaligned accesses */
/* lduw: load unsigned 16-bit little-endian value.
   "lhbrx" is the PowerPC load-halfword-byte-reversed instruction. */
214 static inline int lduw(void *ptr
)
218 __asm__
__volatile__ ("lhbrx %0,0,%1" : "=r" (val
) : "r" (ptr
));
/* portable fallback: assemble from individual bytes (presumably inside
   a dropped #else branch; 'p' is presumably a uint8_t * view of ptr) */
222 return p
[0] | (p
[1] << 8);
/* ldsw: as lduw but sign-extended to int */
226 static inline int ldsw(void *ptr
)
230 __asm__
__volatile__ ("lhbrx %0,0,%1" : "=r" (val
) : "r" (ptr
));
234 return (int16_t)(p
[0] | (p
[1] << 8));
/* ldl: load 32-bit little-endian value ("lwbrx" = load word byte-reversed) */
238 static inline int ldl(void *ptr
)
242 __asm__
__volatile__ ("lwbrx %0,0,%1" : "=r" (val
) : "r" (ptr
));
246 return p
[0] | (p
[1] << 8) | (p
[2] << 16) | (p
[3] << 24);
/* ldq: load 64-bit little-endian value as two 32-bit halves
   (v1/v2 declarations elided by extraction) */
250 static inline uint64_t ldq(void *ptr
)
256 return v1
| ((uint64_t)v2
<< 32);
/* stw: store 16 bits byte-reversed ("sthbrx") */
259 static inline void stw(void *ptr
, int v
)
262 __asm__
__volatile__ ("sthbrx %1,0,%2" : "=m" (*(uint16_t *)ptr
) : "r" (v
), "r" (ptr
));
/* stl: store 32 bits byte-reversed ("stwbrx") */
270 static inline void stl(void *ptr
, int v
)
273 __asm__
__volatile__ ("stwbrx %1,0,%2" : "=m" (*(uint32_t *)ptr
) : "r" (v
), "r" (ptr
));
/* bodies of the remaining big-endian helpers were dropped by extraction;
   only the signatures survive below */
283 static inline void stq(void *ptr
, uint64_t v
)
292 static inline float ldfl(void *ptr
)
302 static inline double ldfq(void *ptr
)
312 static inline void stfl(void *ptr
, float v
)
322 static inline void stfq(void *ptr
, double v
)
/* Load an unsigned 16-bit value from 'ptr' (direct host access;
   non-WORDS_BIGENDIAN path). */
static inline int lduw(void *ptr)
{
    return *(uint16_t *)ptr;
}
/* Load a signed 16-bit value from 'ptr', sign-extended to int. */
static inline int ldsw(void *ptr)
{
    return *(int16_t *)ptr;
}
/* Load a 32-bit value from 'ptr' (returned as int, as callers expect). */
static inline int ldl(void *ptr)
{
    return *(uint32_t *)ptr;
}
/* Load a 64-bit value from 'ptr'. */
static inline uint64_t ldq(void *ptr)
{
    return *(uint64_t *)ptr;
}
/* Store the low 16 bits of 'v' at 'ptr'. */
static inline void stw(void *ptr, int v)
{
    *(uint16_t *)ptr = v;
}
/* Store the low 32 bits of 'v' at 'ptr'. */
static inline void stl(void *ptr, int v)
{
    *(uint32_t *)ptr = v;
}
/* Store the 64-bit value 'v' at 'ptr'. */
static inline void stq(void *ptr, uint64_t v)
{
    *(uint64_t *)ptr = v;
}
/* float access */

/* Load a 32-bit float from 'ptr'. */
static inline float ldfl(void *ptr)
{
    return *(float *)ptr;
}
/* Load a 64-bit double from 'ptr'. */
static inline double ldfq(void *ptr)
{
    return *(double *)ptr;
}
/* Store the 32-bit float 'v' at 'ptr'.
   NOTE(review): the body was dropped by extraction; restored as the
   direct store mirroring ldfl() — confirm against the original. */
static inline void stfl(void *ptr, float v)
{
    *(float *)ptr = v;
}
/* Store the 64-bit double 'v' at 'ptr'.
   NOTE(review): the body was dropped by extraction; restored as the
   direct store mirroring ldfq() — confirm against the original. */
static inline void stfq(void *ptr, double v)
{
    *(double *)ptr = v;
}
/* Programmed I/O on emulated x86 ports (implemented by the emulator
   core, not in this header). out* write 'val' to port 'addr' in
   byte/word/long width; in* read and return a value from port 'addr'. */
void cpu_x86_outb(int addr, int val);
void cpu_x86_outw(int addr, int val);
void cpu_x86_outl(int addr, int val);
int cpu_x86_inb(int addr);
int cpu_x86_inw(int addr);
int cpu_x86_inl(int addr);
401 CPUX86State
*cpu_x86_init(void);
402 int cpu_x86_exec(CPUX86State
*s
);
403 void cpu_x86_interrupt(CPUX86State
*s
);
404 void cpu_x86_close(CPUX86State
*s
);
406 /* needed to load some predefinied segment registers */
407 void cpu_x86_load_seg(CPUX86State
*s
, int seg_reg
, int selector
);
409 /* you can call this signal handler from your SIGBUS and SIGSEGV
410 signal handlers to inform the virtual CPU of exceptions. non zero
411 is returned if the signal was handled by the virtual CPU. */
/* NOTE(review): the final parameter line(s) and closing `);` of this
   prototype appear to have been dropped by extraction — the declaration
   below is visibly truncated after the second parameter. */
413 int cpu_x86_signal_handler(int host_signum
, struct siginfo
*info
,
/* internal functions */

/* bit positions within the 'flags' argument of cpu_x86_gen_code() */
#define GEN_FLAG_CODE32_SHIFT 0
#define GEN_FLAG_ADDSEG_SHIFT 1
#define GEN_FLAG_SS32_SHIFT   2
#define GEN_FLAG_ST_SHIFT     3

/* Generate host code for guest code starting at pc_start into
   gen_code_buf (at most max_code_size bytes); the size actually
   generated is returned through gen_code_size_ptr. */
int cpu_x86_gen_code(uint8_t *gen_code_buf, int max_code_size,
                     int *gen_code_size_ptr,
                     uint8_t *pc_start, uint8_t *cs_base, int flags);
void cpu_x86_tblocks_init(void);
428 #endif /* CPU_I386_H */