/******************************************************************************
 * x86_emulate.h
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef _ASM_X86_KVM_X86_EMULATE_H
#define _ASM_X86_KVM_X86_EMULATE_H

#include <asm/desc_defs.h>

struct x86_emulate_ctxt;

/*
 * x86_emulate_ops:
 *
 * These operations represent the instruction emulator's interface to memory.
 * There are two categories of operation: those that act on ordinary memory
 * regions (*_std), and those that act on memory regions known to require
 * special treatment or emulation (*_emulated).
 *
 * The emulator assumes that an instruction accesses only one 'emulated memory'
 * location, that this location is the given linear faulting address (cr2), and
 * that this is one of the instruction's data operands. Instruction fetches and
 * stack operations are assumed never to access emulated memory. The emulator
 * automatically deduces which operand of a string-move operation is accessing
 * emulated memory, and assumes that the other operand accesses normal memory.
 *
 * NOTES:
 *  1. The emulator isn't very smart about emulated vs. standard memory.
 *     'Emulated memory' access addresses should be checked for sanity.
 *     'Normal memory' accesses may fault, and the caller must arrange to
 *     detect and handle reentrancy into the emulator via recursive faults.
 *     Accesses may be unaligned and may cross page boundaries.
 *  2. If the access fails (cannot emulate, or a standard access faults) then
 *     it is up to the memop to propagate the fault to the guest VM via
 *     some out-of-band mechanism, unknown to the emulator. The memop signals
 *     failure by returning X86EMUL_PROPAGATE_FAULT to the emulator, which will
 *     then immediately bail.
 *  3. Valid access sizes are 1, 2, 4 and 8 bytes. On x86/32 systems only
 *     cmpxchg8b_emulated need support 8-byte accesses.
 *  4. The emulator cannot handle 64-bit mode emulation on an x86/32 system.
 */
/* Access completed successfully: continue emulation as normal. */
#define X86EMUL_CONTINUE        0
/* Access is unhandleable: bail from emulation and return error to caller. */
#define X86EMUL_UNHANDLEABLE    1
/* Terminate emulation but return success to the caller. */
#define X86EMUL_PROPAGATE_FAULT 2 /* propagate a generated fault to guest */
#define X86EMUL_RETRY_INSTR     2 /* retry the instruction for some reason */
#define X86EMUL_CMPXCHG_FAILED  2 /* cmpxchg did not see expected value */
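
/*
 * Editor's note -- an illustrative sketch (not part of the original
 * interface) of how a memop callback maps its outcome onto the return
 * codes above. The helpers handle_mmio_read() and queue_page_fault()
 * are hypothetical placeholders, not real KVM functions.
 *
 *      static int example_read_emulated(unsigned long addr, void *val,
 *                                       unsigned int bytes,
 *                                       struct kvm_vcpu *vcpu)
 *      {
 *              // Access satisfied: let emulation continue as normal.
 *              if (handle_mmio_read(vcpu, addr, val, bytes))
 *                      return X86EMUL_CONTINUE;
 *
 *              // Fault delivered to the guest out-of-band (see note 2).
 *              if (queue_page_fault(vcpu, addr))
 *                      return X86EMUL_PROPAGATE_FAULT;
 *
 *              // Nothing can handle this access: bail out of emulation.
 *              return X86EMUL_UNHANDLEABLE;
 *      }
 */
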
56struct x86_emulate_ops {
57 /*
58 * read_std: Read bytes of standard (non-emulated/special) memory.
Gleb Natapov1871c602010-02-10 14:21:32 +020059 * Used for descriptor reading.
Avi Kivity6aa8b732006-12-10 02:21:36 -080060 * @addr: [IN ] Linear address from which to read.
61 * @val: [OUT] Value read from memory, zero-extended to 'u_long'.
62 * @bytes: [IN ] Number of bytes to read from memory.
63 */
Avi Kivity4c690a12007-04-22 15:28:19 +030064 int (*read_std)(unsigned long addr, void *val,
Gleb Natapov1871c602010-02-10 14:21:32 +020065 unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);
66
67 /*
Gleb Natapov2dafc6c2010-03-18 15:20:16 +020068 * write_std: Write bytes of standard (non-emulated/special) memory.
69 * Used for descriptor writing.
70 * @addr: [IN ] Linear address to which to write.
	 * @val:   [IN ] Value to write to memory (low-order bytes used as
	 *               required).
	 * @bytes: [IN ] Number of bytes to write to memory.
	 */
	int (*write_std)(unsigned long addr, void *val,
			 unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);
	/*
	 * fetch: Read bytes of standard (non-emulated/special) memory.
	 *        Used for instruction fetch.
	 * @addr:  [IN ] Linear address from which to read.
	 * @val:   [OUT] Value read from memory, zero-extended to 'u_long'.
	 * @bytes: [IN ] Number of bytes to read from memory.
	 */
	int (*fetch)(unsigned long addr, void *val,
		     unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);

	/*
	 * read_emulated: Read bytes from emulated/special memory area.
	 * @addr:  [IN ] Linear address from which to read.
	 * @val:   [OUT] Value read from memory, zero-extended to 'u_long'.
	 * @bytes: [IN ] Number of bytes to read from memory.
	 */
	int (*read_emulated)(unsigned long addr,
			     void *val,
			     unsigned int bytes,
			     struct kvm_vcpu *vcpu);

	/*
	 * write_emulated: Write bytes to emulated/special memory area.
	 * @addr:  [IN ] Linear address to which to write.
	 * @val:   [IN ] Value to write to memory (low-order bytes used as
	 *               required).
	 * @bytes: [IN ] Number of bytes to write to memory.
	 */
	int (*write_emulated)(unsigned long addr,
			      const void *val,
			      unsigned int bytes,
			      struct kvm_vcpu *vcpu);

	/*
	 * cmpxchg_emulated: Emulate an atomic (LOCKed) CMPXCHG operation on an
	 *                   emulated/special memory area.
	 * @addr:  [IN ] Linear address to access.
	 * @old:   [IN ] Value expected to be current at @addr.
	 * @new:   [IN ] Value to write to @addr.
	 * @bytes: [IN ] Number of bytes to access using CMPXCHG.
	 */
	int (*cmpxchg_emulated)(unsigned long addr,
				const void *old,
				const void *new,
				unsigned int bytes,
				struct kvm_vcpu *vcpu);
	bool (*get_cached_descriptor)(struct desc_struct *desc,
				      int seg, struct kvm_vcpu *vcpu);
	void (*set_cached_descriptor)(struct desc_struct *desc,
				      int seg, struct kvm_vcpu *vcpu);
	u16 (*get_segment_selector)(int seg, struct kvm_vcpu *vcpu);
	void (*set_segment_selector)(u16 sel, int seg, struct kvm_vcpu *vcpu);
	void (*get_gdt)(struct desc_ptr *dt, struct kvm_vcpu *vcpu);
	ulong (*get_cr)(int cr, struct kvm_vcpu *vcpu);
	void (*set_cr)(int cr, ulong val, struct kvm_vcpu *vcpu);
	int (*cpl)(struct kvm_vcpu *vcpu);
};
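
/*
 * Editor's note -- a minimal sketch of how a host might wire up an ops
 * table for the emulator. The callback names (my_read_std, ...) are
 * hypothetical placeholders for the host's real implementations; the
 * remaining members would be filled in the same way.
 *
 *      static struct x86_emulate_ops example_emulate_ops = {
 *              .read_std         = my_read_std,
 *              .write_std        = my_write_std,
 *              .fetch            = my_fetch,
 *              .read_emulated    = my_read_emulated,
 *              .write_emulated   = my_write_emulated,
 *              .cmpxchg_emulated = my_cmpxchg_emulated,
 *              .get_cr           = my_get_cr,
 *              .set_cr           = my_set_cr,
 *              .cpl              = my_cpl,
 *      };
 */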

/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};

struct fetch_cache {
	u8 data[15];
	unsigned long start;
	unsigned long end;
};

struct decode_cache {
	u8 twobyte;
	u8 b;
	u8 lock_prefix;
	u8 rep_prefix;
	u8 op_bytes;
	u8 ad_bytes;
	u8 rex_prefix;
	struct operand src;
	struct operand src2;
	struct operand dst;
	bool has_seg_override;
	u8 seg_override;
	unsigned int d;
	unsigned long regs[NR_VCPU_REGS];
	unsigned long eip;
	/* modrm */
	u8 modrm;
	u8 modrm_mod;
	u8 modrm_reg;
	u8 modrm_rm;
	u8 use_modrm_ea;
	bool rip_relative;
	unsigned long modrm_ea;
	void *modrm_ptr;
	unsigned long modrm_val;
	struct fetch_cache fetch;
};

struct x86_emulate_ctxt {
	/* Register state before/after emulation. */
	struct kvm_vcpu *vcpu;

	unsigned long eflags;
	unsigned long eip; /* eip before instruction emulation */
	/* Emulated execution mode, represented by an X86EMUL_MODE value. */
	int mode;
	u32 cs_base;

	/* interruptibility state, as a result of execution of STI or MOV SS */
	int interruptibility;

	/* decode cache */
	struct decode_cache decode;
};

/* Repeat String Operation Prefix */
#define REPE_PREFIX  1
#define REPNE_PREFIX 2

/* Execution mode, passed to the emulator. */
#define X86EMUL_MODE_REAL   0	/* Real mode. */
#define X86EMUL_MODE_VM86   1	/* Virtual 8086 mode. */
#define X86EMUL_MODE_PROT16 2	/* 16-bit protected mode. */
#define X86EMUL_MODE_PROT32 4	/* 32-bit protected mode. */
#define X86EMUL_MODE_PROT64 8	/* 64-bit (long) mode. */

/* Host execution mode. */
#if defined(CONFIG_X86_32)
#define X86EMUL_MODE_HOST X86EMUL_MODE_PROT32
#elif defined(CONFIG_X86_64)
#define X86EMUL_MODE_HOST X86EMUL_MODE_PROT64
#endif
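
/*
 * Editor's note -- an illustrative sketch of how a caller might choose
 * the value placed in x86_emulate_ctxt.mode from guest state (CR0.PE,
 * EFLAGS.VM, EFER.LMA/CS.L, CS.D). The predicate helpers used here are
 * hypothetical placeholders, not real KVM functions.
 *
 *      static int example_pick_mode(struct kvm_vcpu *vcpu)
 *      {
 *              if (guest_in_real_mode(vcpu))
 *                      return X86EMUL_MODE_REAL;
 *              if (guest_in_vm86(vcpu))
 *                      return X86EMUL_MODE_VM86;
 *              // EFER.LMA and CS.L set: 64-bit (long) mode.
 *              if (guest_cs_long_mode(vcpu))
 *                      return X86EMUL_MODE_PROT64;
 *              // Otherwise CS.D selects 32-bit vs. 16-bit protected mode.
 *              return guest_cs_db(vcpu) ? X86EMUL_MODE_PROT32
 *                                       : X86EMUL_MODE_PROT16;
 *      }
 */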

int x86_decode_insn(struct x86_emulate_ctxt *ctxt,
		    struct x86_emulate_ops *ops);
int x86_emulate_insn(struct x86_emulate_ctxt *ctxt,
		     struct x86_emulate_ops *ops);
int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 u16 tss_selector, int reason);
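
/*
 * Editor's note -- a sketch of the typical two-pass call sequence,
 * assuming the caller has already filled in ctxt->vcpu, ctxt->eflags,
 * ctxt->eip, ctxt->mode and an ops table, and assuming the usual
 * zero-means-success return convention; error handling is elided.
 *
 *      static int example_emulate_one(struct x86_emulate_ctxt *ctxt,
 *                                     struct x86_emulate_ops *ops)
 *      {
 *              int rc;
 *
 *              // First pass: decode the instruction into ctxt->decode.
 *              rc = x86_decode_insn(ctxt, ops);
 *              if (rc)
 *                      return rc;
 *
 *              // Second pass: execute it through the memop callbacks.
 *              return x86_emulate_insn(ctxt, ops);
 *      }
 */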

#endif /* _ASM_X86_KVM_X86_EMULATE_H */