/******************************************************************************
 * x86_emulate.h
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef _ASM_X86_KVM_X86_EMULATE_H
#define _ASM_X86_KVM_X86_EMULATE_H

#include <asm/desc_defs.h>

struct x86_emulate_ctxt;

struct x86_exception {
	u8 vector;
	bool error_code_valid;
	u16 error_code;
};

/*
 * x86_emulate_ops:
 *
 * These operations represent the instruction emulator's interface to memory.
 * There are two categories of operation: those that act on ordinary memory
 * regions (*_std), and those that act on memory regions known to require
 * special treatment or emulation (*_emulated).
 *
 * The emulator assumes that an instruction accesses only one 'emulated memory'
 * location, that this location is the given linear faulting address (cr2), and
 * that this is one of the instruction's data operands. Instruction fetches and
 * stack operations are assumed never to access emulated memory. The emulator
 * automatically deduces which operand of a string-move operation is accessing
 * emulated memory, and assumes that the other operand accesses normal memory.
 *
 * NOTES:
 * 1. The emulator isn't very smart about emulated vs. standard memory.
 *    'Emulated memory' access addresses should be checked for sanity.
 *    'Normal memory' accesses may fault, and the caller must arrange to
 *    detect and handle reentrancy into the emulator via recursive faults.
 *    Accesses may be unaligned and may cross page boundaries.
 * 2. If the access fails (cannot emulate, or a standard access faults) then
 *    it is up to the memop to propagate the fault to the guest VM via
 *    some out-of-band mechanism, unknown to the emulator. The memop signals
 *    failure by returning X86EMUL_PROPAGATE_FAULT to the emulator, which will
 *    then immediately bail.
 * 3. Valid access sizes are 1, 2, 4 and 8 bytes. On x86/32 systems only
 *    cmpxchg_emulated needs to support 8-byte accesses.
 * 4. The emulator cannot handle 64-bit mode emulation on an x86/32 system.
 */
/* Access completed successfully: continue emulation as normal. */
#define X86EMUL_CONTINUE        0
/* Access is unhandleable: bail from emulation and return error to caller. */
#define X86EMUL_UNHANDLEABLE    1
/* Terminate emulation but return success to the caller. */
#define X86EMUL_PROPAGATE_FAULT 2 /* propagate a generated fault to guest */
#define X86EMUL_RETRY_INSTR     3 /* retry the instruction for some reason */
#define X86EMUL_CMPXCHG_FAILED  4 /* cmpxchg did not see expected value */
#define X86EMUL_IO_NEEDED       5 /* IO is needed to complete emulation */

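/*
 * Illustrative sketch only, not part of the KVM interface: a hypothetical
 * read_emulated callback showing how the result codes above are meant to be
 * used.  The body is a stub; a real implementation would translate @addr
 * through the guest page tables and copy the data from guest memory or MMIO.
 */
struct kvm_vcpu;	/* normally declared before this header is included */

static inline int example_read_emulated(unsigned long addr, void *val,
					unsigned int bytes,
					unsigned int *error,
					struct kvm_vcpu *vcpu)
{
	unsigned int i;

	if (!val || !bytes || bytes > 8) {
		/*
		 * Unsupported access: record a (dummy) error code and let the
		 * emulator bail, propagating a fault to the guest.
		 */
		*error = 0;
		return X86EMUL_PROPAGATE_FAULT;
	}

	/*
	 * If the address were backed by MMIO that cannot be completed here,
	 * returning X86EMUL_IO_NEEDED would ask the caller to perform the I/O
	 * and then re-enter the emulator.
	 */

	for (i = 0; i < bytes; i++)
		((u8 *)val)[i] = 0;	/* stand-in for the actual guest read */

	return X86EMUL_CONTINUE;
}
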
struct x86_emulate_ops {
	/*
	 * read_std: Read bytes of standard (non-emulated/special) memory.
	 *           Used for descriptor reading.
	 *  @addr:  [IN ] Linear address from which to read.
	 *  @val:   [OUT] Value read from memory, zero-extended to 'u_long'.
	 *  @bytes: [IN ] Number of bytes to read from memory.
	 */
	int (*read_std)(unsigned long addr, void *val,
			unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);

	/*
	 * write_std: Write bytes of standard (non-emulated/special) memory.
	 *            Used for descriptor writing.
	 *  @addr:  [IN ] Linear address to which to write.
	 *  @val:   [IN ] Value to write to memory (low-order bytes used as
	 *                required).
	 *  @bytes: [IN ] Number of bytes to write to memory.
	 */
	int (*write_std)(unsigned long addr, void *val,
			 unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);
	/*
	 * fetch: Read bytes of standard (non-emulated/special) memory.
	 *        Used for instruction fetch.
	 *  @addr:  [IN ] Linear address from which to read.
	 *  @val:   [OUT] Value read from memory, zero-extended to 'u_long'.
	 *  @bytes: [IN ] Number of bytes to read from memory.
	 */
	int (*fetch)(unsigned long addr, void *val,
		     unsigned int bytes, struct kvm_vcpu *vcpu, u32 *error);

	/*
	 * read_emulated: Read bytes from emulated/special memory area.
	 *  @addr:  [IN ] Linear address from which to read.
	 *  @val:   [OUT] Value read from memory, zero-extended to 'u_long'.
	 *  @bytes: [IN ] Number of bytes to read from memory.
	 */
	int (*read_emulated)(unsigned long addr,
			     void *val,
			     unsigned int bytes,
			     unsigned int *error,
			     struct kvm_vcpu *vcpu);

	/*
	 * write_emulated: Write bytes to emulated/special memory area.
	 *  @addr:  [IN ] Linear address to which to write.
	 *  @val:   [IN ] Value to write to memory (low-order bytes used as
	 *                required).
	 *  @bytes: [IN ] Number of bytes to write to memory.
	 */
	int (*write_emulated)(unsigned long addr,
			      const void *val,
			      unsigned int bytes,
			      unsigned int *error,
			      struct kvm_vcpu *vcpu);

	/*
	 * cmpxchg_emulated: Emulate an atomic (LOCKed) CMPXCHG operation on an
	 *                   emulated/special memory area.
	 *  @addr:  [IN ] Linear address to access.
	 *  @old:   [IN ] Value expected to be current at @addr.
	 *  @new:   [IN ] Value to write to @addr.
	 *  @bytes: [IN ] Number of bytes to access using CMPXCHG.
	 */
	int (*cmpxchg_emulated)(unsigned long addr,
				const void *old,
				const void *new,
				unsigned int bytes,
				unsigned int *error,
				struct kvm_vcpu *vcpu);

	int (*pio_in_emulated)(int size, unsigned short port, void *val,
			       unsigned int count, struct kvm_vcpu *vcpu);

	int (*pio_out_emulated)(int size, unsigned short port, const void *val,
				unsigned int count, struct kvm_vcpu *vcpu);

	bool (*get_cached_descriptor)(struct desc_struct *desc,
				      int seg, struct kvm_vcpu *vcpu);
	void (*set_cached_descriptor)(struct desc_struct *desc,
				      int seg, struct kvm_vcpu *vcpu);
	u16 (*get_segment_selector)(int seg, struct kvm_vcpu *vcpu);
	void (*set_segment_selector)(u16 sel, int seg, struct kvm_vcpu *vcpu);
	unsigned long (*get_cached_segment_base)(int seg, struct kvm_vcpu *vcpu);
	void (*get_gdt)(struct desc_ptr *dt, struct kvm_vcpu *vcpu);
	void (*get_idt)(struct desc_ptr *dt, struct kvm_vcpu *vcpu);
	ulong (*get_cr)(int cr, struct kvm_vcpu *vcpu);
	int (*set_cr)(int cr, ulong val, struct kvm_vcpu *vcpu);
	int (*cpl)(struct kvm_vcpu *vcpu);
	int (*get_dr)(int dr, unsigned long *dest, struct kvm_vcpu *vcpu);
	int (*set_dr)(int dr, unsigned long value, struct kvm_vcpu *vcpu);
	int (*set_msr)(struct kvm_vcpu *vcpu, u32 msr_index, u64 data);
	int (*get_msr)(struct kvm_vcpu *vcpu, u32 msr_index, u64 *pdata);
};

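/*
 * Minimal sketch, assuming the example_read_emulated() stub above: the
 * x86_emulate_ops table is populated by the host-side KVM code and handed to
 * the emulator through x86_emulate_ctxt::ops, so the emulator only touches
 * guest state through these callbacks.  The table below is illustrative and
 * deliberately incomplete.
 */
static const struct x86_emulate_ops example_emulate_ops = {
	.read_emulated	= example_read_emulated,
	/* the remaining callbacks are omitted from this sketch */
};
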
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
	unsigned int bytes;
	union {
		unsigned long orig_val;
		u64 orig_val64;
	};
	union {
		unsigned long *reg;
		struct segmented_address {
			ulong ea;
			unsigned seg;
		} mem;
	} addr;
	union {
		unsigned long val;
		u64 val64;
		char valptr[sizeof(unsigned long) + 2];
	};
};

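/*
 * Illustrative sketch, not part of the decoder: how a 4-byte memory operand
 * at DS:0x1000 would be described with struct operand.  VCPU_SREG_DS comes
 * from the KVM headers; the helper name and the address are made up for the
 * example.
 */
static inline void example_describe_mem_operand(struct operand *op)
{
	op->type	 = OP_MEM;
	op->bytes	 = 4;			/* 32-bit access */
	op->addr.mem.ea	 = 0x1000;		/* effective address within the segment */
	op->addr.mem.seg = VCPU_SREG_DS;	/* segment the address is relative to */
	op->val		 = 0;			/* filled in once the operand is read */
}
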
struct fetch_cache {
	u8 data[15];
	unsigned long start;
	unsigned long end;
};

struct read_cache {
	u8 data[1024];
	unsigned long pos;
	unsigned long end;
};

struct decode_cache {
	u8 twobyte;
	u8 b;
	u8 lock_prefix;
	u8 rep_prefix;
	u8 op_bytes;
	u8 ad_bytes;
	u8 rex_prefix;
	struct operand src;
	struct operand src2;
	struct operand dst;
	bool has_seg_override;
	u8 seg_override;
	unsigned int d;
	int (*execute)(struct x86_emulate_ctxt *ctxt);
	unsigned long regs[NR_VCPU_REGS];
	unsigned long eip;
	/* modrm */
	u8 modrm;
	u8 modrm_mod;
	u8 modrm_reg;
	u8 modrm_rm;
	u8 modrm_seg;
	bool rip_relative;
	struct fetch_cache fetch;
	struct read_cache io_read;
	struct read_cache mem_read;
};

struct x86_emulate_ctxt {
	struct x86_emulate_ops *ops;

	/* Register state before/after emulation. */
	struct kvm_vcpu *vcpu;

	unsigned long eflags;
	unsigned long eip; /* eip before instruction emulation */
	/* Emulated execution mode, represented by an X86EMUL_MODE value. */
	int mode;
	u32 cs_base;

	/* interruptibility state, as a result of execution of STI or MOV SS */
	int interruptibility;

	bool perm_ok; /* do not check permissions if true */

	bool have_exception;
	struct x86_exception exception;

	/* decode cache */
	struct decode_cache decode;
};

/* Repeat String Operation Prefix */
#define REPE_PREFIX	1
#define REPNE_PREFIX	2

/* Execution mode, passed to the emulator. */
#define X86EMUL_MODE_REAL     0	/* Real mode.             */
#define X86EMUL_MODE_VM86     1	/* Virtual 8086 mode.     */
#define X86EMUL_MODE_PROT16   2	/* 16-bit protected mode. */
#define X86EMUL_MODE_PROT32   4	/* 32-bit protected mode. */
#define X86EMUL_MODE_PROT64   8	/* 64-bit (long) mode.    */
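
/*
 * Illustrative helper (the real selection lives in the KVM host code): how a
 * caller might derive the X86EMUL_MODE_* value from the usual CPU state bits
 * before filling in x86_emulate_ctxt::mode.  Parameter names are made up for
 * this sketch.
 */
static inline int example_pick_emul_mode(bool cr0_pe, bool eflags_vm,
					 bool efer_lma, bool cs_l, bool cs_db)
{
	if (!cr0_pe)
		return X86EMUL_MODE_REAL;	/* CR0.PE clear: real mode */
	if (eflags_vm)
		return X86EMUL_MODE_VM86;	/* EFLAGS.VM set: virtual 8086 */
	if (efer_lma && cs_l)
		return X86EMUL_MODE_PROT64;	/* long mode with a 64-bit CS */
	return cs_db ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16;
}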

/* Host execution mode. */
#if defined(CONFIG_X86_32)
#define X86EMUL_MODE_HOST X86EMUL_MODE_PROT32
#elif defined(CONFIG_X86_64)
#define X86EMUL_MODE_HOST X86EMUL_MODE_PROT64
#endif

int x86_decode_insn(struct x86_emulate_ctxt *ctxt);
#define EMULATION_FAILED -1
#define EMULATION_OK 0
#define EMULATION_RESTART 1
int x86_emulate_insn(struct x86_emulate_ctxt *ctxt);
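
/*
 * Minimal caller sketch (assumed usage; the real driver code lives in
 * arch/x86/kvm/x86.c): the caller fills in the context (ops, vcpu, eip,
 * eflags, mode), decodes once, then emulates and reacts to the EMULATION_*
 * result.  Error handling is simplified for this sketch.
 */
static inline int example_emulate_one(struct x86_emulate_ctxt *ctxt)
{
	int rc;

	/* Decode failure: the caller would typically inject #UD or bail out. */
	if (x86_decode_insn(ctxt) != EMULATION_OK)
		return EMULATION_FAILED;

	do {
		rc = x86_emulate_insn(ctxt);
		/*
		 * A real caller also services pending PIO/MMIO
		 * (X86EMUL_IO_NEEDED) before looping on EMULATION_RESTART.
		 */
	} while (rc == EMULATION_RESTART);

	return rc;
}
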
int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
			 u16 tss_selector, int reason,
			 bool has_error_code, u32 error_code);
int emulate_int_real(struct x86_emulate_ctxt *ctxt,
		     struct x86_emulate_ops *ops, int irq);
#endif /* _ASM_X86_KVM_X86_EMULATE_H */