blob: 8c65ff274785b9871a5609adc815a15265cdd40a [file] [log] [blame]
Avi Kivity6aa8b732006-12-10 02:21:36 -08001/******************************************************************************
Avi Kivity56e82312009-08-12 15:04:37 +03002 * emulate.c
Avi Kivity6aa8b732006-12-10 02:21:36 -08003 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 *
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
Rusty Russelldcc07662007-07-17 23:16:56 +10009 * privileged instructions:
Avi Kivity6aa8b732006-12-10 02:21:36 -080010 *
11 * Copyright (C) 2006 Qumranet
Nicolas Kaiser9611c182010-10-06 14:23:22 +020012 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
Avi Kivity6aa8b732006-12-10 02:21:36 -080013 *
14 * Avi Kivity <avi@qumranet.com>
15 * Yaniv Kamay <yaniv@qumranet.com>
16 *
17 * This work is licensed under the terms of the GNU GPL, version 2. See
18 * the COPYING file in the top-level directory.
19 *
20 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
21 */
22
Avi Kivityedf88412007-12-16 11:02:48 +020023#include <linux/kvm_host.h>
Marcelo Tosatti5fdbf972008-06-27 14:58:02 -030024#include "kvm_cache_regs.h"
Avi Kivity6aa8b732006-12-10 02:21:36 -080025#include <linux/module.h>
Avi Kivity56e82312009-08-12 15:04:37 +030026#include <asm/kvm_emulate.h>
Avi Kivity6aa8b732006-12-10 02:21:36 -080027
Avi Kivity3eeb3282010-01-21 15:31:48 +020028#include "x86.h"
Gleb Natapov38ba30b2010-03-18 15:20:17 +020029#include "tss.h"
Andre Przywarae99f0502009-06-17 15:50:33 +020030
/*
 * Operand types
 *
 * These enumerate what an operand *is*; they are packed into the per-opcode
 * flags word below via DstShift (and selected by Src2Mask for operand 2).
 */
#define OpNone             0ull
#define OpImplicit         1ull  /* No generic decode */
#define OpReg              2ull  /* Register */
#define OpMem              3ull  /* Memory */
#define OpAcc              4ull  /* Accumulator: AL/AX/EAX/RAX */
#define OpDI               5ull  /* ES:DI/EDI/RDI */
#define OpMem64            6ull  /* Memory, 64-bit */
#define OpImmUByte         7ull  /* Zero-extended 8-bit immediate */
#define OpDX               8ull  /* DX register */

#define OpBits             4  /* Width of operand field */
#define OpMask             ((1ull << OpBits) - 1)

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define DstShift    1
#define ImplicitOps (OpImplicit << DstShift)
#define DstReg      (OpReg << DstShift)
#define DstMem      (OpMem << DstShift)
#define DstAcc      (OpAcc << DstShift)
#define DstDI       (OpDI << DstShift)
#define DstMem64    (OpMem64 << DstShift)
#define DstImmUByte (OpImmUByte << DstShift)
#define DstDX       (OpDX << DstShift)
#define DstMask     (OpMask << DstShift)
/* Source operand type. */
#define SrcNone     (0<<5)	/* No source operand. */
#define SrcReg      (1<<5)	/* Register operand. */
#define SrcMem      (2<<5)	/* Memory operand. */
#define SrcMem16    (3<<5)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<5)	/* Memory operand (32-bit). */
#define SrcImm      (5<<5)	/* Immediate operand. */
#define SrcImmByte  (6<<5)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<5)	/* Implied '1' */
#define SrcImmUByte (8<<5)      /* 8-bit unsigned immediate operand. */
#define SrcImmU     (9<<5)      /* Immediate operand, unsigned */
#define SrcSI       (0xa<<5)	/* Source is in the DS:RSI */
#define SrcImmFAddr (0xb<<5)	/* Source is immediate far address */
#define SrcMemFAddr (0xc<<5)	/* Source is far address in memory */
#define SrcAcc      (0xd<<5)	/* Source Accumulator */
#define SrcImmU16   (0xe<<5)    /* Immediate operand, unsigned, 16 bits */
#define SrcDX       (0xf<<5)	/* Source is in DX register */
#define SrcMask     (0xf<<5)
/* Generic ModRM decode. */
#define ModRM       (1<<9)
/* Destination is only written; never read. */
#define Mov         (1<<10)
#define BitOp       (1<<11)
#define MemAbs      (1<<12)     /* Memory operand is absolute displacement */
#define String      (1<<13)     /* String instruction (rep capable) */
#define Stack       (1<<14)     /* Stack instruction (push/pop) */
#define GroupMask   (7<<15)     /* Opcode uses one of the group mechanisms */
#define Group       (1<<15)     /* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (2<<15)     /* Alternate decoding of mod == 3 */
#define Prefix      (3<<15)     /* Instruction varies with 66/f2/f3 prefix */
#define RMExt       (4<<15)     /* Opcode extension in ModRM r/m if mod == 3 */
#define Sse         (1<<18)     /* SSE Vector instruction */
/* Misc flags */
#define Prot        (1<<21) /* instruction generates #UD if not in prot-mode */
#define VendorSpecific (1<<22) /* Vendor specific instruction */
#define NoAccess    (1<<23) /* Don't access memory (lea/invlpg/verr etc) */
#define Op3264      (1<<24) /* Operand is 64b in long mode, 32b otherwise */
#define Undefined   (1<<25) /* No Such Instruction */
#define Lock        (1<<26) /* lock prefix is allowed for the instruction */
#define Priv        (1<<27) /* instruction generates #GP if current CPL != 0 */
#define No64        (1<<28) /* instruction is invalid in 64-bit mode */
/* Source 2 operand type */
#define Src2None    (0u<<29)
#define Src2CL      (1u<<29)
#define Src2ImmByte (2u<<29)
#define Src2One     (3u<<29)
#define Src2Imm     (4u<<29)
#define Src2Mask    (7u<<29)

/* Replicate an opcode-table entry N times (used to build decode tables). */
#define X2(x...) x, x
#define X3(x...) X2(x), x
#define X4(x...) X2(x), X2(x)
#define X5(x...) X4(x), x
#define X6(x...) X4(x), X2(x)
#define X7(x...) X4(x), X3(x)
#define X8(x...) X4(x), X4(x)
#define X16(x...) X8(x), X8(x)
/*
 * One decode-table entry: the operand/flag word above, an optional
 * intercept id (for nested virtualization), and either a handler or a
 * pointer to a sub-table selected by ModRM bits or mandatory prefix.
 */
struct opcode {
	u64 flags : 56;
	u64 intercept : 8;
	union {
		int (*execute)(struct x86_emulate_ctxt *ctxt);
		struct opcode *group;	/* Group: indexed by ModRM reg */
		struct group_dual *gdual;	/* GroupDual: by ModRM mod */
		struct gprefix *gprefix;	/* Prefix: by 66/f2/f3 */
	} u;
	int (*check_perm)(struct x86_emulate_ctxt *ctxt);
};

/* Two group tables: one for ModRM mod 0-2 (memory), one for mod 3 (reg). */
struct group_dual {
	struct opcode mod012[8];
	struct opcode mod3[8];
};

/* Opcode variants selected by mandatory prefix (none/66/f2/f3). */
struct gprefix {
	struct opcode pfx_no;
	struct opcode pfx_66;
	struct opcode pfx_f2;
	struct opcode pfx_f3;
};
150
/* EFLAGS bit definitions. */
#define EFLG_ID (1<<21)
#define EFLG_VIP (1<<20)
#define EFLG_VIF (1<<19)
#define EFLG_AC (1<<18)
#define EFLG_VM (1<<17)
#define EFLG_RF (1<<16)
#define EFLG_IOPL (3<<12)
#define EFLG_NT (1<<14)
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_IF (1<<9)
#define EFLG_TF (1<<8)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/* Architecturally reserved EFLAGS bits: must-be-zero mask and bit 1 (always 1). */
#define EFLG_RESERVED_ZEROS_MASK 0xffc0802a
#define EFLG_RESERVED_ONE_MASK 2
172
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * _sav/_msk/_tmp are asm operand numbers (as strings); the sequence merges
 * the saved guest flags (under _msk) into the host EFLAGS via the stack.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "

/* Emit x only when building for 64-bit (quad-sized operand cases). */
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif
224
/*
 * Innermost two-operand emulation helper: run "_op<suffix> src,dst" once
 * with guest EFLAGS swapped in/out. _x/_y are register-constraint letters
 * for the source; _dsttype casts dst.val to the right width.
 * NOTE: expects a local 'unsigned long _tmp' declared by the caller macro.
 */
#define ____emulate_2op(ctxt, _op, _x, _y, _suffix, _dsttype)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" ((ctxt)->eflags),			\
			  "+q" (*(_dsttype*)&(ctxt)->dst.val),		\
			  "=&r" (_tmp)					\
			: _y ((ctxt)->src.val), "i" (EFLAGS_MASK));	\
	} while (0)
Avi Kivity6b7ad612008-11-26 15:30:45 +0200236
237
/* Raw emulation: instruction has two explicit operands. */
/* Dispatch on destination size for word/long/quad operands (no byte case). */
#define __emulate_2op_nobyte(ctxt,_op,_wx,_wy,_lx,_ly,_qx,_qy)	\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((ctxt)->dst.bytes) {				\
		case 2:							\
			____emulate_2op(ctxt,_op,_wx,_wy,"w",u16);	\
			break;						\
		case 4:							\
			____emulate_2op(ctxt,_op,_lx,_ly,"l",u32);	\
			break;						\
		case 8:							\
			ON64(____emulate_2op(ctxt,_op,_qx,_qy,"q",u64)); \
			break;						\
		}							\
	} while (0)

/* Full dispatch: byte-sized destination handled here, the rest delegated. */
#define __emulate_2op(ctxt,_op,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)	\
	do {								\
		unsigned long _tmp;					\
		switch ((ctxt)->dst.bytes) {				\
		case 1:							\
			____emulate_2op(ctxt,_op,_bx,_by,"b",u8);	\
			break;						\
		default:						\
			__emulate_2op_nobyte(ctxt, _op,			\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
269
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(ctxt, _op) \
	__emulate_2op(ctxt, _op, "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(ctxt, _op) \
	__emulate_2op(ctxt, _op, "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(ctxt, _op) \
	__emulate_2op_nobyte(ctxt, _op, "w", "r", _LO32, "r", "", "r")
Avi Kivity6aa8b732006-12-10 02:21:36 -0800281
/* Instruction has three operands and one operand is stored in ECX register */
/*
 * Run "_op<suffix> %cl,dst" (shld/shrd-style) with guest EFLAGS swapped
 * in/out, then write all three operand values back into the context.
 *
 * BUG FIX: the write-back previously stored _srcv into (ctxt)->src2.val a
 * second time, so (ctxt)->src.val was never restored and src2.val ended up
 * holding the source value instead of the CL count.
 */
#define __emulate_2op_cl(ctxt, _op, _suffix, _type)		\
	do {							\
		unsigned long _tmp;				\
		_type _clv  = (ctxt)->src2.val;			\
		_type _srcv = (ctxt)->src.val;			\
		_type _dstv = (ctxt)->dst.val;			\
								\
		__asm__ __volatile__ (				\
			_PRE_EFLAGS("0", "5", "2")		\
			_op _suffix " %4,%1 \n"			\
			_POST_EFLAGS("0", "5", "2")		\
			: "=m" ((ctxt)->eflags), "+r" (_dstv), "=&r" (_tmp) \
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK) \
			);					\
								\
		(ctxt)->src2.val = (unsigned long) _clv;	\
		(ctxt)->src.val = (unsigned long) _srcv;	\
		(ctxt)->dst.val = (unsigned long) _dstv;	\
	} while (0)
302
/* Size dispatch for the CL-count (shld/shrd) helper; no byte-sized form. */
#define emulate_2op_cl(ctxt, _op) \
	do { \
		switch ((ctxt)->dst.bytes) { \
		case 2: \
			__emulate_2op_cl(ctxt, _op, "w", u16); \
			break; \
		case 4: \
			__emulate_2op_cl(ctxt, _op, "l", u32); \
			break; \
		case 8: \
			ON64(__emulate_2op_cl(ctxt, _op, "q", ulong)); \
			break; \
		} \
	} while (0)
317
/*
 * Run a one-operand instruction ("_op<suffix> dst") directly on the
 * in-memory dst.val, merging guest EFLAGS around it.
 */
#define __emulate_1op(ctxt, _op, _suffix) \
	do { \
		unsigned long _tmp; \
		\
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "3", "2") \
			_op _suffix " %1; " \
			_POST_EFLAGS("0", "3", "2") \
			: "=m" ((ctxt)->eflags), "+m" ((ctxt)->dst.val), \
			  "=&r" (_tmp) \
			: "i" (EFLAGS_MASK)); \
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(ctxt, _op) \
	do { \
		switch ((ctxt)->dst.bytes) { \
		case 1:	__emulate_1op(ctxt, _op, "b"); break; \
		case 2:	__emulate_1op(ctxt, _op, "w"); break; \
		case 4:	__emulate_1op(ctxt, _op, "l"); break; \
		case 8:	ON64(__emulate_1op(ctxt, _op, "q")); break; \
		} \
	} while (0)
341
/*
 * Run a mul/imul/div/idiv-style instruction with implicit RAX:RDX operands.
 * A #DE raised by the insn is caught via the exception-table fixup, which
 * sets _ex to 1 instead of faulting the host.
 */
#define __emulate_1op_rax_rdx(ctxt, _op, _suffix, _ex) \
	do { \
		unsigned long _tmp; \
		ulong *rax = &(ctxt)->regs[VCPU_REGS_RAX]; \
		ulong *rdx = &(ctxt)->regs[VCPU_REGS_RDX]; \
		\
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "5", "1") \
			"1: \n\t" \
			_op _suffix " %6; " \
			"2: \n\t" \
			_POST_EFLAGS("0", "5", "1") \
			".pushsection .fixup,\"ax\" \n\t" \
			"3: movb $1, %4 \n\t" \
			"jmp 2b \n\t" \
			".popsection \n\t" \
			_ASM_EXTABLE(1b, 3b) \
			: "=m" ((ctxt)->eflags), "=&r" (_tmp), \
			  "+a" (*rax), "+d" (*rdx), "+qm"(_ex) \
			: "i" (EFLAGS_MASK), "m" ((ctxt)->src.val), \
			  "a" (*rax), "d" (*rdx)); \
	} while (0)

/* instruction has only one source operand, destination is implicit (e.g. mul, div, imul, idiv) */
#define emulate_1op_rax_rdx(ctxt, _op, _ex) \
	do { \
		switch((ctxt)->src.bytes) { \
		case 1: \
			__emulate_1op_rax_rdx(ctxt, _op, "b", _ex); \
			break; \
		case 2: \
			__emulate_1op_rax_rdx(ctxt, _op, "w", _ex); \
			break; \
		case 4: \
			__emulate_1op_rax_rdx(ctxt, _op, "l", _ex); \
			break; \
		case 8: ON64( \
			__emulate_1op_rax_rdx(ctxt, _op, "q", _ex)); \
			break; \
		} \
	} while (0)
383
/*
 * Describe the instruction currently being emulated and ask the backend
 * (via ops->intercept) whether the nested hypervisor intercepts it at the
 * given stage. Returns the backend's verdict unchanged.
 */
static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt,
				    enum x86_intercept intercept,
				    enum x86_intercept_stage stage)
{
	struct x86_instruction_info info = {
		.intercept  = intercept,
		.rep_prefix = ctxt->rep_prefix,
		.modrm_mod  = ctxt->modrm_mod,
		.modrm_reg  = ctxt->modrm_reg,
		.modrm_rm   = ctxt->modrm_rm,
		.src_val    = ctxt->src.val64,
		.src_bytes  = ctxt->src.bytes,
		.dst_bytes  = ctxt->dst.bytes,
		.ad_bytes   = ctxt->ad_bytes,
		.next_rip   = ctxt->eip,
	};

	return ctxt->ops->intercept(ctxt, &info, stage);
}
403
Avi Kivity9dac77f2011-06-01 15:34:25 +0300404static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt)
Harvey Harrisonddcb2882008-02-18 11:12:48 -0800405{
Avi Kivity9dac77f2011-06-01 15:34:25 +0300406 return (1UL << (ctxt->ad_bytes << 3)) - 1;
Harvey Harrisonddcb2882008-02-18 11:12:48 -0800407}
408
Avi Kivity6aa8b732006-12-10 02:21:36 -0800409/* Access/update address held in a register, based on addressing mode. */
Harvey Harrisone4706772008-02-19 07:40:38 -0800410static inline unsigned long
Avi Kivity9dac77f2011-06-01 15:34:25 +0300411address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg)
Harvey Harrisone4706772008-02-19 07:40:38 -0800412{
Avi Kivity9dac77f2011-06-01 15:34:25 +0300413 if (ctxt->ad_bytes == sizeof(unsigned long))
Harvey Harrisone4706772008-02-19 07:40:38 -0800414 return reg;
415 else
Avi Kivity9dac77f2011-06-01 15:34:25 +0300416 return reg & ad_mask(ctxt);
Harvey Harrisone4706772008-02-19 07:40:38 -0800417}
418
/*
 * Return a register value masked to the current address width, for use as
 * a memory address (e.g. string-op SI/DI).
 */
static inline unsigned long
register_address(struct x86_emulate_ctxt *ctxt, unsigned long reg)
{
	return address_mask(ctxt, reg);
}
424
Harvey Harrison7a9572752008-02-19 07:40:41 -0800425static inline void
Avi Kivity9dac77f2011-06-01 15:34:25 +0300426register_address_increment(struct x86_emulate_ctxt *ctxt, unsigned long *reg, int inc)
Harvey Harrison7a9572752008-02-19 07:40:41 -0800427{
Avi Kivity9dac77f2011-06-01 15:34:25 +0300428 if (ctxt->ad_bytes == sizeof(unsigned long))
Harvey Harrison7a9572752008-02-19 07:40:41 -0800429 *reg += inc;
430 else
Avi Kivity9dac77f2011-06-01 15:34:25 +0300431 *reg = (*reg & ~ad_mask(ctxt)) | ((*reg + inc) & ad_mask(ctxt));
Harvey Harrison7a9572752008-02-19 07:40:41 -0800432}
Avi Kivity6aa8b732006-12-10 02:21:36 -0800433
/* Apply a signed relative displacement to _eip, wrapped to address size. */
static inline void jmp_rel(struct x86_emulate_ctxt *ctxt, int rel)
{
	register_address_increment(ctxt, &ctxt->_eip, rel);
}
Nitin A Kamble098c9372007-08-19 11:00:36 +0300438
Avi Kivity56697682011-04-03 14:08:51 +0300439static u32 desc_limit_scaled(struct desc_struct *desc)
440{
441 u32 limit = get_desc_limit(desc);
442
443 return desc->g ? (limit << 12) | 0xfff : limit;
444}
445
Avi Kivity9dac77f2011-06-01 15:34:25 +0300446static void set_seg_override(struct x86_emulate_ctxt *ctxt, int seg)
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300447{
Avi Kivity9dac77f2011-06-01 15:34:25 +0300448 ctxt->has_seg_override = true;
449 ctxt->seg_override = seg;
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300450}
451
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +0900452static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300453{
454 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
455 return 0;
456
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +0900457 return ctxt->ops->get_cached_segment_base(ctxt, seg);
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300458}
459
Avi Kivity9dac77f2011-06-01 15:34:25 +0300460static unsigned seg_override(struct x86_emulate_ctxt *ctxt)
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300461{
Avi Kivity9dac77f2011-06-01 15:34:25 +0300462 if (!ctxt->has_seg_override)
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300463 return 0;
464
Avi Kivity9dac77f2011-06-01 15:34:25 +0300465 return ctxt->seg_override;
Avi Kivity7a5b56d2008-06-22 16:22:51 +0300466}
467
Avi Kivity35d3d4a2010-11-22 17:53:25 +0200468static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec,
469 u32 error, bool valid)
Gleb Natapov54b84862010-04-28 19:15:44 +0300470{
Avi Kivityda9cb572010-11-22 17:53:21 +0200471 ctxt->exception.vector = vec;
472 ctxt->exception.error_code = error;
473 ctxt->exception.error_code_valid = valid;
Avi Kivity35d3d4a2010-11-22 17:53:25 +0200474 return X86EMUL_PROPAGATE_FAULT;
Gleb Natapov54b84862010-04-28 19:15:44 +0300475}
476
/* Queue a #DB (debug) exception; carries no error code. */
static int emulate_db(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, DB_VECTOR, 0, false);
}
481
/* Queue a #GP (general protection) exception with error code 'err'. */
static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, GP_VECTOR, err, true);
}
486
/* Queue a #SS (stack segment) exception with error code 'err'. */
static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, SS_VECTOR, err, true);
}
491
/* Queue a #UD (invalid opcode) exception; carries no error code. */
static int emulate_ud(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, UD_VECTOR, 0, false);
}
496
/* Queue a #TS (invalid TSS) exception with error code 'err'. */
static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, TS_VECTOR, err, true);
}
501
/* Queue a #DE (divide error) exception; carries no error code. */
static int emulate_de(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, DE_VECTOR, 0, false);
}
506
/* Queue a #NM (device not available) exception; carries no error code. */
static int emulate_nm(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, NM_VECTOR, 0, false);
}
511
Avi Kivity1aa36612011-04-27 13:20:30 +0300512static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
513{
514 u16 selector;
515 struct desc_struct desc;
516
517 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
518 return selector;
519}
520
521static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
522 unsigned seg)
523{
524 u16 dummy;
525 u32 base3;
526 struct desc_struct desc;
527
528 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
529 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
530}
531
/*
 * Translate a segmented address to a linear address, performing the
 * architectural checks for the current mode:
 *  - real mode: no checks, just base + offset;
 *  - 64-bit mode: canonical-address check only (bits 63:47 sign-extended);
 *  - protected mode: usability, access rights (write/execute/read),
 *    limit checks (expand-up vs expand-down), and privilege (max of CPL
 *    and selector RPL against descriptor DPL).
 * 'fetch' marks instruction fetches, which are allowed from execute-only
 * segments. On success stores the linear address and returns
 * X86EMUL_CONTINUE; otherwise queues #GP (or #SS for SS-relative accesses).
 */
static int __linearize(struct x86_emulate_ctxt *ctxt,
		     struct segmented_address addr,
		     unsigned size, bool write, bool fetch,
		     ulong *linear)
{
	struct desc_struct desc;
	bool usable;
	ulong la;
	u32 lim;
	u16 sel;
	unsigned cpl, rpl;

	la = seg_base(ctxt, addr.seg) + addr.ea;
	switch (ctxt->mode) {
	case X86EMUL_MODE_REAL:
		break;
	case X86EMUL_MODE_PROT64:
		/* canonical check: top 16 bits must sign-extend bit 47 */
		if (((signed long)la << 16) >> 16 != la)
			return emulate_gp(ctxt, 0);
		break;
	default:
		usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
						addr.seg);
		if (!usable)
			goto bad;
		/* code segment or read-only data segment */
		if (((desc.type & 8) || !(desc.type & 2)) && write)
			goto bad;
		/* unreadable code segment */
		if (!fetch && (desc.type & 8) && !(desc.type & 2))
			goto bad;
		lim = desc_limit_scaled(&desc);
		if ((desc.type & 8) || !(desc.type & 4)) {
			/* expand-up segment */
			if (addr.ea > lim || (u32)(addr.ea + size - 1) > lim)
				goto bad;
		} else {
			/* expand-down segment: valid range is (limit, upper] */
			if (addr.ea <= lim || (u32)(addr.ea + size - 1) <= lim)
				goto bad;
			lim = desc.d ? 0xffffffff : 0xffff;
			if (addr.ea > lim || (u32)(addr.ea + size - 1) > lim)
				goto bad;
		}
		cpl = ctxt->ops->cpl(ctxt);
		rpl = sel & 3;
		cpl = max(cpl, rpl);
		if (!(desc.type & 8)) {
			/* data segment */
			if (cpl > desc.dpl)
				goto bad;
		} else if ((desc.type & 8) && !(desc.type & 4)) {
			/* nonconforming code segment */
			if (cpl != desc.dpl)
				goto bad;
		} else if ((desc.type & 8) && (desc.type & 4)) {
			/* conforming code segment */
			if (cpl < desc.dpl)
				goto bad;
		}
		break;
	}
	/* truncate to 32 bits unless a 64-bit address is in effect */
	if (fetch ? ctxt->mode != X86EMUL_MODE_PROT64 : ctxt->ad_bytes != 8)
		la &= (u32)-1;
	*linear = la;
	return X86EMUL_CONTINUE;
bad:
	/*
	 * NOTE(review): addr.seg (the segment register index) is passed as
	 * the exception error code; the SDM says the error code should be
	 * the faulting selector or 0 — confirm against later fixes.
	 */
	if (addr.seg == VCPU_SREG_SS)
		return emulate_ss(ctxt, addr.seg);
	else
		return emulate_gp(ctxt, addr.seg);
}
604
/*
 * Linearize a segmented address for a data access (not an instruction
 * fetch); see __linearize() for the mode-specific checks.
 */
static int linearize(struct x86_emulate_ctxt *ctxt,
		     struct segmented_address addr,
		     unsigned size, bool write,
		     ulong *linear)
{
	return __linearize(ctxt, addr, size, write, false, linear);
}
612
613
Avi Kivity3ca3ac42011-03-31 16:52:26 +0200614static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
615 struct segmented_address addr,
616 void *data,
617 unsigned size)
618{
Avi Kivity9fa088f2011-03-31 18:54:30 +0200619 int rc;
620 ulong linear;
621
Avi Kivity83b87952011-04-03 11:31:19 +0300622 rc = linearize(ctxt, addr, size, false, &linear);
Avi Kivity9fa088f2011-03-31 18:54:30 +0200623 if (rc != X86EMUL_CONTINUE)
624 return rc;
Avi Kivity0f65dd72011-04-20 13:37:53 +0300625 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception);
Avi Kivity3ca3ac42011-03-31 16:52:26 +0200626}
627
/*
 * Fetch the next byte of the instruction being emulated which is pointed to
 * by ctxt->_eip, then increment ctxt->_eip.
 *
 * Also prefetch the remaining bytes of the instruction without crossing page
 * boundary if they are not in fetch_cache yet.
 */
static int do_insn_fetch_byte(struct x86_emulate_ctxt *ctxt, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->fetch;
	int rc;
	int size, cur_size;

	if (ctxt->_eip == fc->end) {
		/* cache exhausted: linearize CS:_eip and refill */
		unsigned long linear;
		struct segmented_address addr = { .seg = VCPU_SREG_CS,
						  .ea = ctxt->_eip };
		cur_size = fc->end - fc->start;
		/* cap at the 15-byte insn limit and the current page end */
		size = min(15UL - cur_size,
			   PAGE_SIZE - offset_in_page(ctxt->_eip));
		rc = __linearize(ctxt, addr, size, false, true, &linear);
		if (unlikely(rc != X86EMUL_CONTINUE))
			return rc;
		rc = ctxt->ops->fetch(ctxt, linear, fc->data + cur_size,
				      size, &ctxt->exception);
		if (unlikely(rc != X86EMUL_CONTINUE))
			return rc;
		fc->end += size;
	}
	*dest = fc->data[ctxt->_eip - fc->start];
	ctxt->_eip++;
	return X86EMUL_CONTINUE;
}
661
/*
 * Fetch 'size' instruction bytes into 'dest', one byte at a time,
 * advancing ctxt->_eip.  Refuses fetches that would push the total
 * instruction length past the architectural 15-byte limit.
 */
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 void *dest, unsigned size)
{
	int rc;

	/* x86 instructions are limited to 15 bytes. */
	if (unlikely(ctxt->_eip + size - ctxt->eip > 15))
		return X86EMUL_UNHANDLEABLE;
	while (size--) {
		rc = do_insn_fetch_byte(ctxt, dest++);
		if (rc != X86EMUL_CONTINUE)
			return rc;
	}
	return X86EMUL_CONTINUE;
}
677
/*
 * Fetch next part of the instruction being emulated, evaluating to the
 * fetched value cast to _type.
 *
 * NOTE: this macro (and insn_fetch_arr below) must be expanded inside a
 * function that declares a local 'int rc' and provides a 'done' label;
 * on fetch failure it sets rc and jumps there.
 */
#define insn_fetch(_type, _ctxt)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(_ctxt, &_x, sizeof(_type));			\
	if (rc != X86EMUL_CONTINUE)					\
		goto done;						\
	(_type)_x;							\
})
686
/*
 * Fetch the next _size instruction bytes into the buffer _arr.  Like
 * insn_fetch, relies on a local 'rc' and a 'done' label in the caller.
 */
#define insn_fetch_arr(_arr, _size, _ctxt)				\
({	rc = do_insn_fetch(_ctxt, _arr, (_size));			\
	if (rc != X86EMUL_CONTINUE)					\
		goto done;						\
})
692
Rusty Russell1e3c5cb2007-07-17 23:16:11 +1000693/*
694 * Given the 'reg' portion of a ModRM byte, and a register block, return a
695 * pointer into the block that addresses the relevant register.
696 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
697 */
698static void *decode_register(u8 modrm_reg, unsigned long *regs,
699 int highbyte_regs)
Avi Kivity6aa8b732006-12-10 02:21:36 -0800700{
701 void *p;
702
703 p = &regs[modrm_reg];
704 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
705 p = (unsigned char *)&regs[modrm_reg & 3] + 1;
706 return p;
707}
708
/*
 * Read a pseudo-descriptor (16-bit limit followed by a base address) from
 * guest memory into *size and *address.  With a 16-bit operand size only
 * 3 base bytes are read (op_bytes forced from 2 to 3), i.e. a 24-bit
 * base; otherwise op_bytes base bytes are read.
 */
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct segmented_address addr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;	/* zero high bytes not covered by the read below */
	rc = segmented_read_std(ctxt, addr, size, 2);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	addr.ea += 2;
	rc = segmented_read_std(ctxt, addr, address, op_bytes);
	return rc;
}
725
Nitin A Kamblebbe9abb2007-09-15 10:23:07 +0300726static int test_cc(unsigned int condition, unsigned int flags)
727{
728 int rc = 0;
729
730 switch ((condition & 15) >> 1) {
731 case 0: /* o */
732 rc |= (flags & EFLG_OF);
733 break;
734 case 1: /* b/c/nae */
735 rc |= (flags & EFLG_CF);
736 break;
737 case 2: /* z/e */
738 rc |= (flags & EFLG_ZF);
739 break;
740 case 3: /* be/na */
741 rc |= (flags & (EFLG_CF|EFLG_ZF));
742 break;
743 case 4: /* s */
744 rc |= (flags & EFLG_SF);
745 break;
746 case 5: /* p/pe */
747 rc |= (flags & EFLG_PF);
748 break;
749 case 7: /* le/ng */
750 rc |= (flags & EFLG_ZF);
751 /* fall through */
752 case 6: /* l/nge */
753 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
754 break;
755 }
756
757 /* Odd condition identifiers (lsb == 1) have inverted sense. */
758 return (!!rc ^ (condition & 1));
759}
760
Avi Kivity91ff3cb2010-08-01 12:53:09 +0300761static void fetch_register_operand(struct operand *op)
762{
763 switch (op->bytes) {
764 case 1:
765 op->val = *(u8 *)op->addr.reg;
766 break;
767 case 2:
768 op->val = *(u16 *)op->addr.reg;
769 break;
770 case 4:
771 op->val = *(u32 *)op->addr.reg;
772 break;
773 case 8:
774 op->val = *(u64 *)op->addr.reg;
775 break;
776 }
777}
778
/*
 * Copy host XMM register 'reg' into *data with an unaligned movdqu.
 * FPU/SSE state is claimed around the access via get_fpu()/put_fpu().
 * The switch is unavoidable: the register number must be an immediate
 * in the asm template.
 */
static void read_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data, int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movdqu %%xmm0, %0" : "=m"(*data)); break;
	case 1: asm("movdqu %%xmm1, %0" : "=m"(*data)); break;
	case 2: asm("movdqu %%xmm2, %0" : "=m"(*data)); break;
	case 3: asm("movdqu %%xmm3, %0" : "=m"(*data)); break;
	case 4: asm("movdqu %%xmm4, %0" : "=m"(*data)); break;
	case 5: asm("movdqu %%xmm5, %0" : "=m"(*data)); break;
	case 6: asm("movdqu %%xmm6, %0" : "=m"(*data)); break;
	case 7: asm("movdqu %%xmm7, %0" : "=m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqu %%xmm8, %0" : "=m"(*data)); break;
	case 9: asm("movdqu %%xmm9, %0" : "=m"(*data)); break;
	case 10: asm("movdqu %%xmm10, %0" : "=m"(*data)); break;
	case 11: asm("movdqu %%xmm11, %0" : "=m"(*data)); break;
	case 12: asm("movdqu %%xmm12, %0" : "=m"(*data)); break;
	case 13: asm("movdqu %%xmm13, %0" : "=m"(*data)); break;
	case 14: asm("movdqu %%xmm14, %0" : "=m"(*data)); break;
	case 15: asm("movdqu %%xmm15, %0" : "=m"(*data)); break;
#endif
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}
805
/*
 * Counterpart of read_sse_reg(): copy *data into host XMM register
 * 'reg' with movdqu, holding FPU/SSE state via get_fpu()/put_fpu().
 */
static void write_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data,
			  int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movdqu %0, %%xmm0" : : "m"(*data)); break;
	case 1: asm("movdqu %0, %%xmm1" : : "m"(*data)); break;
	case 2: asm("movdqu %0, %%xmm2" : : "m"(*data)); break;
	case 3: asm("movdqu %0, %%xmm3" : : "m"(*data)); break;
	case 4: asm("movdqu %0, %%xmm4" : : "m"(*data)); break;
	case 5: asm("movdqu %0, %%xmm5" : : "m"(*data)); break;
	case 6: asm("movdqu %0, %%xmm6" : : "m"(*data)); break;
	case 7: asm("movdqu %0, %%xmm7" : : "m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqu %0, %%xmm8" : : "m"(*data)); break;
	case 9: asm("movdqu %0, %%xmm9" : : "m"(*data)); break;
	case 10: asm("movdqu %0, %%xmm10" : : "m"(*data)); break;
	case 11: asm("movdqu %0, %%xmm11" : : "m"(*data)); break;
	case 12: asm("movdqu %0, %%xmm12" : : "m"(*data)); break;
	case 13: asm("movdqu %0, %%xmm13" : : "m"(*data)); break;
	case 14: asm("movdqu %0, %%xmm14" : : "m"(*data)); break;
	case 15: asm("movdqu %0, %%xmm15" : : "m"(*data)); break;
#endif
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}
833
/*
 * Decode a register operand into *op.  The register number comes from
 * the ModRM reg field, or, for opcodes without a ModRM byte, from the
 * low 3 bits of the opcode extended by REX.B.  Sse instructions address
 * XMM registers instead of GPRs.  High-byte registers (AH..BH) are only
 * selectable when no REX prefix is present.  The current value is
 * fetched and also saved in op->orig_val.
 */
static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
				    struct operand *op,
				    int inhibit_bytereg)
{
	unsigned reg = ctxt->modrm_reg;
	int highbyte_regs = ctxt->rex_prefix == 0;

	if (!(ctxt->d & ModRM))
		reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);

	if (ctxt->d & Sse) {
		op->type = OP_XMM;
		op->bytes = 16;
		op->addr.xmm = reg;
		read_sse_reg(ctxt, &op->vec_val, reg);
		return;
	}

	op->type = OP_REG;
	if ((ctxt->d & ByteOp) && !inhibit_bytereg) {
		op->addr.reg = decode_register(reg, ctxt->regs, highbyte_regs);
		op->bytes = 1;
	} else {
		op->addr.reg = decode_register(reg, ctxt->regs, 0);
		op->bytes = ctxt->op_bytes;
	}
	fetch_register_operand(op);
	op->orig_val = op->val;
}
863
/*
 * Decode the operand named by the instruction's ModRM byte (plus
 * optional SIB byte and displacement) into *op.  Register operands
 * (mod == 3) are fetched immediately; memory operands only get their
 * effective address computed here, into op->addr.mem.ea, with
 * ctxt->modrm_seg recording the default segment.
 *
 * Uses insn_fetch(), so fetch failures exit through the 'done' label
 * with rc set.
 */
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct operand *op)
{
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = X86EMUL_CONTINUE;
	ulong modrm_ea = 0;

	/* REX prefix bits extend the reg/index/base register numbers. */
	if (ctxt->rex_prefix) {
		ctxt->modrm_reg = (ctxt->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (ctxt->rex_prefix & 2) << 2; /* REX.X */
		ctxt->modrm_rm = base_reg = (ctxt->rex_prefix & 1) << 3; /* REG.B */
	}

	ctxt->modrm = insn_fetch(u8, ctxt);
	ctxt->modrm_mod |= (ctxt->modrm & 0xc0) >> 6;
	ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3;
	ctxt->modrm_rm |= (ctxt->modrm & 0x07);
	ctxt->modrm_seg = VCPU_SREG_DS;

	if (ctxt->modrm_mod == 3) {
		/* Register (or XMM register, for Sse insns) operand. */
		op->type = OP_REG;
		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
		op->addr.reg = decode_register(ctxt->modrm_rm,
					       ctxt->regs, ctxt->d & ByteOp);
		if (ctxt->d & Sse) {
			op->type = OP_XMM;
			op->bytes = 16;
			op->addr.xmm = ctxt->modrm_rm;
			read_sse_reg(ctxt, &op->vec_val, ctxt->modrm_rm);
			return rc;
		}
		fetch_register_operand(op);
		return rc;
	}

	op->type = OP_MEM;

	if (ctxt->ad_bytes == 2) {
		unsigned bx = ctxt->regs[VCPU_REGS_RBX];
		unsigned bp = ctxt->regs[VCPU_REGS_RBP];
		unsigned si = ctxt->regs[VCPU_REGS_RSI];
		unsigned di = ctxt->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (ctxt->modrm_mod) {
		case 0:
			if (ctxt->modrm_rm == 6)
				modrm_ea += insn_fetch(u16, ctxt);
			break;
		case 1:
			modrm_ea += insn_fetch(s8, ctxt);
			break;
		case 2:
			modrm_ea += insn_fetch(u16, ctxt);
			break;
		}
		switch (ctxt->modrm_rm) {
		case 0:
			modrm_ea += bx + si;
			break;
		case 1:
			modrm_ea += bx + di;
			break;
		case 2:
			modrm_ea += bp + si;
			break;
		case 3:
			modrm_ea += bp + di;
			break;
		case 4:
			modrm_ea += si;
			break;
		case 5:
			modrm_ea += di;
			break;
		case 6:
			if (ctxt->modrm_mod != 0)
				modrm_ea += bp;
			break;
		case 7:
			modrm_ea += bx;
			break;
		}
		/* BP-based forms default to the stack segment. */
		if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 ||
		    (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0))
			ctxt->modrm_seg = VCPU_SREG_SS;
		modrm_ea = (u16)modrm_ea;	/* wrap to 16-bit address */
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((ctxt->modrm_rm & 7) == 4) {
			/* SIB byte: scale*index + base (+ disp32 if no base). */
			sib = insn_fetch(u8, ctxt);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0)
				modrm_ea += insn_fetch(s32, ctxt);
			else
				modrm_ea += ctxt->regs[base_reg];
			if (index_reg != 4)
				modrm_ea += ctxt->regs[index_reg] << scale;
		} else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) {
			/* mod=0, rm=5: RIP-relative in 64-bit mode. */
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				ctxt->rip_relative = 1;
		} else
			modrm_ea += ctxt->regs[ctxt->modrm_rm];
		switch (ctxt->modrm_mod) {
		case 0:
			if (ctxt->modrm_rm == 5)
				modrm_ea += insn_fetch(s32, ctxt);
			break;
		case 1:
			modrm_ea += insn_fetch(s8, ctxt);
			break;
		case 2:
			modrm_ea += insn_fetch(s32, ctxt);
			break;
		}
	}
	op->addr.mem.ea = modrm_ea;
done:
	return rc;
}
988
/*
 * Decode an absolute (moffs-style) memory operand: the effective address
 * is encoded directly in the instruction, with a width given by the
 * address-size attribute (2, 4 or 8 bytes).  Fetch failures exit through
 * the 'done' label with rc set (see insn_fetch).
 */
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct operand *op)
{
	int rc = X86EMUL_CONTINUE;

	op->type = OP_MEM;
	switch (ctxt->ad_bytes) {
	case 2:
		op->addr.mem.ea = insn_fetch(u16, ctxt);
		break;
	case 4:
		op->addr.mem.ea = insn_fetch(u32, ctxt);
		break;
	case 8:
		op->addr.mem.ea = insn_fetch(u64, ctxt);
		break;
	}
done:
	return rc;
}
1009
/*
 * Adjust the operands of a bit operation.  When the destination is
 * memory and the bit index comes from a register, the index may address
 * any bit in memory: displace the memory address by the (signed)
 * word-aligned part of the bit offset, then reduce src.val to the bit
 * index within that word.
 */
static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt)
{
	long sv = 0, mask;

	if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) {
		/* mask off the in-word bit index, keep the word part */
		mask = ~(ctxt->dst.bytes * 8 - 1);

		if (ctxt->src.bytes == 2)
			sv = (s16)ctxt->src.val & (s16)mask;
		else if (ctxt->src.bytes == 4)
			sv = (s32)ctxt->src.val & (s32)mask;

		/* signed shift: negative indices move the address down */
		ctxt->dst.addr.mem.ea += (sv >> 3);
	}

	/* only subword offset */
	ctxt->src.val &= (ctxt->dst.bytes << 3) - 1;
}
1028
/*
 * Read 'size' bytes of guest memory at 'addr' into 'dest' through the
 * per-instruction read cache (ctxt->mem_read).  Data already cached
 * (mc->pos < mc->end) is replayed from the cache; new data is fetched
 * and appended in chunks of at most 8 bytes, so repeated invocations
 * for the same instruction see identical values.
 */
static int read_emulated(struct x86_emulate_ctxt *ctxt,
			 unsigned long addr, void *dest, unsigned size)
{
	int rc;
	struct read_cache *mc = &ctxt->mem_read;

	while (size) {
		int n = min(size, 8u);
		size -= n;
		if (mc->pos < mc->end)
			goto read_cached;

		rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, n,
					      &ctxt->exception);
		if (rc != X86EMUL_CONTINUE)
			return rc;
		mc->end += n;

	read_cached:
		memcpy(dest, mc->data + mc->pos, n);
		mc->pos += n;
		dest += n;
		addr += n;
	}
	return X86EMUL_CONTINUE;
}
1055
Avi Kivity3ca3ac42011-03-31 16:52:26 +02001056static int segmented_read(struct x86_emulate_ctxt *ctxt,
1057 struct segmented_address addr,
1058 void *data,
1059 unsigned size)
1060{
Avi Kivity9fa088f2011-03-31 18:54:30 +02001061 int rc;
1062 ulong linear;
1063
Avi Kivity83b87952011-04-03 11:31:19 +03001064 rc = linearize(ctxt, addr, size, false, &linear);
Avi Kivity9fa088f2011-03-31 18:54:30 +02001065 if (rc != X86EMUL_CONTINUE)
1066 return rc;
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001067 return read_emulated(ctxt, linear, data, size);
Avi Kivity3ca3ac42011-03-31 16:52:26 +02001068}
1069
1070static int segmented_write(struct x86_emulate_ctxt *ctxt,
1071 struct segmented_address addr,
1072 const void *data,
1073 unsigned size)
1074{
Avi Kivity9fa088f2011-03-31 18:54:30 +02001075 int rc;
1076 ulong linear;
1077
Avi Kivity83b87952011-04-03 11:31:19 +03001078 rc = linearize(ctxt, addr, size, true, &linear);
Avi Kivity9fa088f2011-03-31 18:54:30 +02001079 if (rc != X86EMUL_CONTINUE)
1080 return rc;
Avi Kivity0f65dd72011-04-20 13:37:53 +03001081 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1082 &ctxt->exception);
Avi Kivity3ca3ac42011-03-31 16:52:26 +02001083}
1084
1085static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt,
1086 struct segmented_address addr,
1087 const void *orig_data, const void *data,
1088 unsigned size)
1089{
Avi Kivity9fa088f2011-03-31 18:54:30 +02001090 int rc;
1091 ulong linear;
1092
Avi Kivity83b87952011-04-03 11:31:19 +03001093 rc = linearize(ctxt, addr, size, true, &linear);
Avi Kivity9fa088f2011-03-31 18:54:30 +02001094 if (rc != X86EMUL_CONTINUE)
1095 return rc;
Avi Kivity0f65dd72011-04-20 13:37:53 +03001096 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1097 size, &ctxt->exception);
Avi Kivity3ca3ac42011-03-31 16:52:26 +02001098}
1099
/*
 * Emulate an IN from 'port' into 'dest' via the ctxt->io_read cache.
 * For rep-prefixed input the first call reads ahead up to RCX elements,
 * bounded by the cache buffer size and by the page containing RDI
 * (direction flag honored); later iterations are served from the cache.
 * Returns 1 on success, 0 when the backend could not supply the data.
 */
static int pio_in_emulated(struct x86_emulate_ctxt *ctxt,
			   unsigned int size, unsigned short port,
			   void *dest)
{
	struct read_cache *rc = &ctxt->io_read;

	if (rc->pos == rc->end) { /* refill pio read ahead */
		unsigned int in_page, n;
		unsigned int count = ctxt->rep_prefix ?
			address_mask(ctxt, ctxt->regs[VCPU_REGS_RCX]) : 1;
		/* bytes left in RDI's page, in the copy direction */
		in_page = (ctxt->eflags & EFLG_DF) ?
			offset_in_page(ctxt->regs[VCPU_REGS_RDI]) :
			PAGE_SIZE - offset_in_page(ctxt->regs[VCPU_REGS_RDI]);
		n = min(min(in_page, (unsigned int)sizeof(rc->data)) / size,
			count);
		if (n == 0)
			n = 1;
		rc->pos = rc->end = 0;
		if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
			return 0;
		rc->end = n * size;
	}

	memcpy(dest, rc->data + rc->pos, size);
	rc->pos += size;
	return 1;
}
1127
/*
 * Return in *dt the base/limit of the descriptor table 'selector'
 * refers to: the LDT when the selector's TI bit (bit 2) is set, the
 * GDT otherwise.  If the LDTR cannot be read, *dt stays zeroed, so any
 * subsequent limit check fails.
 */
static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt,
				     u16 selector, struct desc_ptr *dt)
{
	struct x86_emulate_ops *ops = ctxt->ops;

	if (selector & 1 << 2) {
		struct desc_struct desc;
		u16 sel;

		memset (dt, 0, sizeof *dt);
		if (!ops->get_segment(ctxt, &sel, &desc, NULL, VCPU_SREG_LDTR))
			return;

		dt->size = desc_limit_scaled(&desc); /* what if limit > 65535? */
		dt->address = get_desc_base(&desc);
	} else
		ops->get_gdt(ctxt, dt);
}
1146
1147/* allowed just for 8 bytes segments */
1148static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt,
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001149 u16 selector, struct desc_struct *desc)
1150{
1151 struct desc_ptr dt;
1152 u16 index = selector >> 3;
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001153 ulong addr;
1154
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001155 get_descriptor_table_ptr(ctxt, selector, &dt);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001156
Avi Kivity35d3d4a2010-11-22 17:53:25 +02001157 if (dt.size < index * 8 + 7)
1158 return emulate_gp(ctxt, selector & 0xfffc);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001159
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001160 addr = dt.address + index * 8;
1161 return ctxt->ops->read_std(ctxt, addr, desc, sizeof *desc,
1162 &ctxt->exception);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001163}
1164
1165/* allowed just for 8 bytes segments */
1166static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt,
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001167 u16 selector, struct desc_struct *desc)
1168{
1169 struct desc_ptr dt;
1170 u16 index = selector >> 3;
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001171 ulong addr;
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001172
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001173 get_descriptor_table_ptr(ctxt, selector, &dt);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001174
Avi Kivity35d3d4a2010-11-22 17:53:25 +02001175 if (dt.size < index * 8 + 7)
1176 return emulate_gp(ctxt, selector & 0xfffc);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001177
1178 addr = dt.address + index * 8;
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001179 return ctxt->ops->write_std(ctxt, addr, desc, sizeof *desc,
1180 &ctxt->exception);
Gleb Natapov38ba30b2010-03-18 15:20:17 +02001181}
1182
/*
 * Load segment register 'seg' from 'selector', performing the
 * protected-mode type/privilege checks for that segment.  In real and
 * VM86 mode a flat real-mode descriptor is synthesized instead.
 * Does not support long mode.
 * Returns X86EMUL_CONTINUE on success; on failure queues #GP/#SS/#NP
 * and returns X86EMUL_PROPAGATE_FAULT.
 */
static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
				   u16 selector, int seg)
{
	struct desc_struct seg_desc;
	u8 dpl, rpl, cpl;
	unsigned err_vec = GP_VECTOR;
	u32 err_code = 0;
	bool null_selector = !(selector & ~0x3); /* 0000-0003 are null */
	int ret;

	memset(&seg_desc, 0, sizeof seg_desc);

	if ((seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86)
	    || ctxt->mode == X86EMUL_MODE_REAL) {
		/* set real mode segment descriptor */
		set_desc_base(&seg_desc, selector << 4);
		set_desc_limit(&seg_desc, 0xffff);
		seg_desc.type = 3;
		seg_desc.p = 1;
		seg_desc.s = 1;
		goto load;
	}

	/* NULL selector is not valid for TR, CS and SS */
	if ((seg == VCPU_SREG_CS || seg == VCPU_SREG_SS || seg == VCPU_SREG_TR)
	    && null_selector)
		goto exception;

	/* TR should be in GDT only */
	if (seg == VCPU_SREG_TR && (selector & (1 << 2)))
		goto exception;

	if (null_selector) /* for NULL selector skip all following checks */
		goto load;

	ret = read_segment_descriptor(ctxt, selector, &seg_desc);
	if (ret != X86EMUL_CONTINUE)
		return ret;

	err_code = selector & 0xfffc;
	err_vec = GP_VECTOR;

	/* can't load system descriptor into segment selector */
	if (seg <= VCPU_SREG_GS && !seg_desc.s)
		goto exception;

	if (!seg_desc.p) {
		/* segment not present: #SS for the stack, #NP otherwise */
		err_vec = (seg == VCPU_SREG_SS) ? SS_VECTOR : NP_VECTOR;
		goto exception;
	}

	rpl = selector & 3;
	dpl = seg_desc.dpl;
	cpl = ctxt->ops->cpl(ctxt);

	switch (seg) {
	case VCPU_SREG_SS:
		/*
		 * segment is not a writable data segment, or the segment
		 * selector's RPL != CPL, or the descriptor's DPL != CPL
		 */
		if (rpl != cpl || (seg_desc.type & 0xa) != 0x2 || dpl != cpl)
			goto exception;
		break;
	case VCPU_SREG_CS:
		if (!(seg_desc.type & 8))
			goto exception;

		if (seg_desc.type & 4) {
			/* conforming */
			if (dpl > cpl)
				goto exception;
		} else {
			/* nonconforming */
			if (rpl > cpl || dpl != cpl)
				goto exception;
		}
		/* CS(RPL) <- CPL */
		selector = (selector & 0xfffc) | cpl;
		break;
	case VCPU_SREG_TR:
		if (seg_desc.s || (seg_desc.type != 1 && seg_desc.type != 9))
			goto exception;
		break;
	case VCPU_SREG_LDTR:
		if (seg_desc.s || seg_desc.type != 2)
			goto exception;
		break;
	default: /* DS, ES, FS, or GS */
		/*
		 * segment is not a data or readable code segment or
		 * ((segment is a data or nonconforming code segment)
		 * and (both RPL and CPL > DPL))
		 */
		if ((seg_desc.type & 0xa) == 0x8 ||
		    (((seg_desc.type & 0xc) != 0xc) &&
		     (rpl > dpl && cpl > dpl)))
			goto exception;
		break;
	}

	if (seg_desc.s) {
		/* mark segment as accessed */
		seg_desc.type |= 1;
		ret = write_segment_descriptor(ctxt, selector, &seg_desc);
		if (ret != X86EMUL_CONTINUE)
			return ret;
	}
load:
	ctxt->ops->set_segment(ctxt, selector, &seg_desc, 0, seg);
	return X86EMUL_CONTINUE;
exception:
	emulate_exception(ctxt, err_vec, err_code, true);
	return X86EMUL_PROPAGATE_FAULT;
}
1299
Wei Yongjun31be40b2010-08-17 09:17:30 +08001300static void write_register_operand(struct operand *op)
1301{
1302 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1303 switch (op->bytes) {
1304 case 1:
1305 *(u8 *)op->addr.reg = (u8)op->val;
1306 break;
1307 case 2:
1308 *(u16 *)op->addr.reg = (u16)op->val;
1309 break;
1310 case 4:
1311 *op->addr.reg = (u32)op->val;
1312 break; /* 64b: zero-extend */
1313 case 8:
1314 *op->addr.reg = op->val;
1315 break;
1316 }
1317}
1318
/*
 * Commit the instruction's destination operand (ctxt->dst) to its home:
 * a general-purpose register, memory (an atomic cmpxchg against the
 * original value when the insn carried a LOCK prefix), or an XMM
 * register.  OP_NONE means the instruction has no writeback.
 */
static int writeback(struct x86_emulate_ctxt *ctxt)
{
	int rc;

	switch (ctxt->dst.type) {
	case OP_REG:
		write_register_operand(&ctxt->dst);
		break;
	case OP_MEM:
		if (ctxt->lock_prefix)
			rc = segmented_cmpxchg(ctxt,
					       ctxt->dst.addr.mem,
					       &ctxt->dst.orig_val,
					       &ctxt->dst.val,
					       ctxt->dst.bytes);
		else
			rc = segmented_write(ctxt,
					     ctxt->dst.addr.mem,
					     &ctxt->dst.val,
					     ctxt->dst.bytes);
		if (rc != X86EMUL_CONTINUE)
			return rc;
		break;
	case OP_XMM:
		write_sse_reg(ctxt, &ctxt->dst.vec_val, ctxt->dst.addr.xmm);
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return X86EMUL_CONTINUE;
}
1353
/*
 * PUSH: RSP is first decremented by the operand size, then ctxt->src.val
 * is written at the new SS:RSP.  Generic writeback is disabled because
 * the stack write performed here *is* the instruction's writeback.
 */
static int em_push(struct x86_emulate_ctxt *ctxt)
{
	struct segmented_address addr;

	register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RSP], -ctxt->op_bytes);
	addr.ea = register_address(ctxt, ctxt->regs[VCPU_REGS_RSP]);
	addr.seg = VCPU_SREG_SS;

	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return segmented_write(ctxt, addr, &ctxt->src.val, ctxt->op_bytes);
}
1366
/*
 * Pop 'len' bytes from the stack into *dest: read at the current SS:RSP,
 * and increment RSP by len only after the read succeeded, so a faulting
 * pop leaves the stack pointer untouched.
 */
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       void *dest, int len)
{
	int rc;
	struct segmented_address addr;

	addr.ea = register_address(ctxt, ctxt->regs[VCPU_REGS_RSP]);
	addr.seg = VCPU_SREG_SS;
	rc = segmented_read(ctxt, addr, dest, len);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RSP], len);
	return rc;
}
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02001382
/* POP: pop ctxt->op_bytes off the stack into the destination operand. */
static int em_pop(struct x86_emulate_ctxt *ctxt)
{
	return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes);
}
1387
/*
 * Pop a flags image off the stack into *dest, honoring the privilege
 * rules for which EFLAGS bits may actually change:
 *  - IOPL is writable only at CPL 0 (protected modes);
 *  - IF is writable only when CPL <= IOPL (protected modes);
 *  - VM86 with IOPL < 3 raises #GP;
 *  - real mode may change both IOPL and IF.
 * Bits outside change_mask keep their current values.
 */
static int emulate_popf(struct x86_emulate_ctxt *ctxt,
			void *dest, int len)
{
	int rc;
	unsigned long val, change_mask;
	int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> IOPL_SHIFT;
	int cpl = ctxt->ops->cpl(ctxt);

	rc = emulate_pop(ctxt, &val, len);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	/* flags any mode is allowed to change */
	change_mask = EFLG_CF | EFLG_PF | EFLG_AF | EFLG_ZF | EFLG_SF | EFLG_OF
		| EFLG_TF | EFLG_DF | EFLG_NT | EFLG_RF | EFLG_AC | EFLG_ID;

	switch(ctxt->mode) {
	case X86EMUL_MODE_PROT64:
	case X86EMUL_MODE_PROT32:
	case X86EMUL_MODE_PROT16:
		if (cpl == 0)
			change_mask |= EFLG_IOPL;
		if (cpl <= iopl)
			change_mask |= EFLG_IF;
		break;
	case X86EMUL_MODE_VM86:
		if (iopl < 3)
			return emulate_gp(ctxt, 0);
		change_mask |= EFLG_IF;
		break;
	default: /* real mode */
		change_mask |= (EFLG_IOPL | EFLG_IF);
		break;
	}

	*(unsigned long *)dest =
		(ctxt->eflags & ~change_mask) | (val & change_mask);

	return rc;
}
1427
Takuya Yoshikawa62aaa2f2011-04-23 18:52:56 +09001428static int em_popf(struct x86_emulate_ctxt *ctxt)
1429{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001430 ctxt->dst.type = OP_REG;
1431 ctxt->dst.addr.reg = &ctxt->eflags;
1432 ctxt->dst.bytes = ctxt->op_bytes;
1433 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes);
Takuya Yoshikawa62aaa2f2011-04-23 18:52:56 +09001434}
1435
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001436static int emulate_push_sreg(struct x86_emulate_ctxt *ctxt, int seg)
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001437{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001438 ctxt->src.val = get_segment_selector(ctxt, seg);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001439
Takuya Yoshikawa4487b3b2011-04-13 00:31:23 +09001440 return em_push(ctxt);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001441}
1442
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001443static int emulate_pop_sreg(struct x86_emulate_ctxt *ctxt, int seg)
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001444{
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001445 unsigned long selector;
1446 int rc;
1447
Avi Kivity9dac77f2011-06-01 15:34:25 +03001448 rc = emulate_pop(ctxt, &selector, ctxt->op_bytes);
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09001449 if (rc != X86EMUL_CONTINUE)
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001450 return rc;
1451
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001452 rc = load_segment_descriptor(ctxt, (u16)selector, seg);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03001453 return rc;
1454}
1455
Takuya Yoshikawab96a7fa2011-04-23 18:51:07 +09001456static int em_pusha(struct x86_emulate_ctxt *ctxt)
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001457{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001458 unsigned long old_esp = ctxt->regs[VCPU_REGS_RSP];
Wei Yongjunc37eda12010-06-15 09:03:33 +08001459 int rc = X86EMUL_CONTINUE;
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001460 int reg = VCPU_REGS_RAX;
1461
1462 while (reg <= VCPU_REGS_RDI) {
1463 (reg == VCPU_REGS_RSP) ?
Avi Kivity9dac77f2011-06-01 15:34:25 +03001464 (ctxt->src.val = old_esp) : (ctxt->src.val = ctxt->regs[reg]);
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001465
Takuya Yoshikawa4487b3b2011-04-13 00:31:23 +09001466 rc = em_push(ctxt);
Wei Yongjunc37eda12010-06-15 09:03:33 +08001467 if (rc != X86EMUL_CONTINUE)
1468 return rc;
1469
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001470 ++reg;
1471 }
Wei Yongjunc37eda12010-06-15 09:03:33 +08001472
Wei Yongjunc37eda12010-06-15 09:03:33 +08001473 return rc;
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001474}
1475
Takuya Yoshikawa62aaa2f2011-04-23 18:52:56 +09001476static int em_pushf(struct x86_emulate_ctxt *ctxt)
1477{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001478 ctxt->src.val = (unsigned long)ctxt->eflags;
Takuya Yoshikawa62aaa2f2011-04-23 18:52:56 +09001479 return em_push(ctxt);
1480}
1481
Takuya Yoshikawab96a7fa2011-04-23 18:51:07 +09001482static int em_popa(struct x86_emulate_ctxt *ctxt)
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001483{
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09001484 int rc = X86EMUL_CONTINUE;
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001485 int reg = VCPU_REGS_RDI;
1486
1487 while (reg >= VCPU_REGS_RAX) {
1488 if (reg == VCPU_REGS_RSP) {
Avi Kivity9dac77f2011-06-01 15:34:25 +03001489 register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RSP],
1490 ctxt->op_bytes);
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001491 --reg;
1492 }
1493
Avi Kivity9dac77f2011-06-01 15:34:25 +03001494 rc = emulate_pop(ctxt, &ctxt->regs[reg], ctxt->op_bytes);
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09001495 if (rc != X86EMUL_CONTINUE)
Mohammed Gamalabcf14b2009-09-01 15:28:11 +02001496 break;
1497 --reg;
1498 }
1499 return rc;
1500}
1501
/*
 * Deliver interrupt vector @irq in real mode: push FLAGS, CS and IP,
 * mask IF/TF/AC, then fetch the new CS:IP from the real-mode interrupt
 * vector table addressed by the (re-purposed) IDTR base.
 *
 * Returns X86EMUL_CONTINUE on success, or the return code of the first
 * failing sub-step (stack push, IVT read, or CS load).
 */
int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	int rc;
	struct desc_ptr dt;
	gva_t cs_addr;
	gva_t eip_addr;
	u16 cs, eip;

	/* TODO: Add limit checks */
	/* Build the IRET frame: FLAGS is pushed first... */
	ctxt->src.val = ctxt->eflags;
	rc = em_push(ctxt);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	/* ...and the handler runs with IF/TF/AC masked. */
	ctxt->eflags &= ~(EFLG_IF | EFLG_TF | EFLG_AC);

	/* ...then the old CS selector... */
	ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS);
	rc = em_push(ctxt);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	/* ...and finally the old IP. */
	ctxt->src.val = ctxt->_eip;
	rc = em_push(ctxt);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	ops->get_idt(ctxt, &dt);

	/* Real-mode IVT entries are 4 bytes: IP at +0, CS at +2. */
	eip_addr = dt.address + (irq << 2);
	cs_addr = dt.address + (irq << 2) + 2;

	rc = ops->read_std(ctxt, cs_addr, &cs, 2, &ctxt->exception);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	rc = ops->read_std(ctxt, eip_addr, &eip, 2, &ctxt->exception);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS);
	if (rc != X86EMUL_CONTINUE)
		return rc;

	ctxt->_eip = eip;

	return rc;
}
1550
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001551static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq)
Mohammed Gamal6e154e52010-08-04 14:38:06 +03001552{
1553 switch(ctxt->mode) {
1554 case X86EMUL_MODE_REAL:
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001555 return emulate_int_real(ctxt, irq);
Mohammed Gamal6e154e52010-08-04 14:38:06 +03001556 case X86EMUL_MODE_VM86:
1557 case X86EMUL_MODE_PROT16:
1558 case X86EMUL_MODE_PROT32:
1559 case X86EMUL_MODE_PROT64:
1560 default:
1561 /* Protected mode interrupts unimplemented yet */
1562 return X86EMUL_UNHANDLEABLE;
1563 }
1564}
1565
/*
 * Real-mode IRET: pop IP, CS and FLAGS from the stack and resume at
 * the restored CS:IP.  How FLAGS is merged depends on the operand
 * size: a 32-bit pop replaces all writable bits (the VM86-related
 * bits are preserved), a 16-bit pop replaces only the low word.
 */
static int emulate_iret_real(struct x86_emulate_ctxt *ctxt)
{
	int rc = X86EMUL_CONTINUE;
	unsigned long temp_eip = 0;
	unsigned long temp_eflags = 0;
	unsigned long cs = 0;
	/* Bits of the popped image that may enter EFLAGS... */
	unsigned long mask = EFLG_CF | EFLG_PF | EFLG_AF | EFLG_ZF | EFLG_SF | EFLG_TF |
			     EFLG_IF | EFLG_DF | EFLG_OF | EFLG_IOPL | EFLG_NT | EFLG_RF |
			     EFLG_AC | EFLG_ID | (1 << 1); /* Last one is the reserved bit */
	/* ...and bits that always keep their current value. */
	unsigned long vm86_mask = EFLG_VM | EFLG_VIF | EFLG_VIP;

	/* TODO: Add stack limit check */

	/* Frame layout (lowest address first): IP, CS, FLAGS. */
	rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes);

	if (rc != X86EMUL_CONTINUE)
		return rc;

	/* A real-mode return address must fit in 16 bits. */
	if (temp_eip & ~0xffff)
		return emulate_gp(ctxt, 0);

	rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);

	if (rc != X86EMUL_CONTINUE)
		return rc;

	rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes);

	if (rc != X86EMUL_CONTINUE)
		return rc;

	rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS);

	if (rc != X86EMUL_CONTINUE)
		return rc;

	ctxt->_eip = temp_eip;


	if (ctxt->op_bytes == 4)
		ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask));
	else if (ctxt->op_bytes == 2) {
		/* 16-bit IRET replaces only the low word of EFLAGS. */
		ctxt->eflags &= ~0xffff;
		ctxt->eflags |= temp_eflags;
	}

	ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */
	ctxt->eflags |= EFLG_RESERVED_ONE_MASK;

	return rc;
}
1617
Takuya Yoshikawae01991e2011-05-29 21:55:10 +09001618static int em_iret(struct x86_emulate_ctxt *ctxt)
Mohammed Gamal62bd4302010-07-28 12:38:40 +03001619{
1620 switch(ctxt->mode) {
1621 case X86EMUL_MODE_REAL:
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001622 return emulate_iret_real(ctxt);
Mohammed Gamal62bd4302010-07-28 12:38:40 +03001623 case X86EMUL_MODE_VM86:
1624 case X86EMUL_MODE_PROT16:
1625 case X86EMUL_MODE_PROT32:
1626 case X86EMUL_MODE_PROT64:
1627 default:
1628 /* iret from protected mode unimplemented yet */
1629 return X86EMUL_UNHANDLEABLE;
1630 }
1631}
1632
Takuya Yoshikawad2f62762011-05-02 02:30:48 +09001633static int em_jmp_far(struct x86_emulate_ctxt *ctxt)
1634{
Takuya Yoshikawad2f62762011-05-02 02:30:48 +09001635 int rc;
1636 unsigned short sel;
1637
Avi Kivity9dac77f2011-06-01 15:34:25 +03001638 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
Takuya Yoshikawad2f62762011-05-02 02:30:48 +09001639
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001640 rc = load_segment_descriptor(ctxt, sel, VCPU_SREG_CS);
Takuya Yoshikawad2f62762011-05-02 02:30:48 +09001641 if (rc != X86EMUL_CONTINUE)
1642 return rc;
1643
Avi Kivity9dac77f2011-06-01 15:34:25 +03001644 ctxt->_eip = 0;
1645 memcpy(&ctxt->_eip, ctxt->src.valptr, ctxt->op_bytes);
Takuya Yoshikawad2f62762011-05-02 02:30:48 +09001646 return X86EMUL_CONTINUE;
1647}
1648
Takuya Yoshikawa51187682011-05-02 02:29:17 +09001649static int em_grp1a(struct x86_emulate_ctxt *ctxt)
Avi Kivityfaa5a3a2008-11-27 17:36:41 +02001650{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001651 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->dst.bytes);
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02001652}
1653
/*
 * Group 2 (0xc0/0xc1, 0xd0-0xd3): rotate/shift, selected by the /reg
 * field of ModRM.  The arithmetic and flag updates are performed by
 * the emulate_2op_SrcB() inline-asm helper; /6 is an alias of /4.
 */
static int em_grp2(struct x86_emulate_ctxt *ctxt)
{
	switch (ctxt->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB(ctxt, "rol");
		break;
	case 1:	/* ror */
		emulate_2op_SrcB(ctxt, "ror");
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB(ctxt, "rcl");
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB(ctxt, "rcr");
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB(ctxt, "sal");
		break;
	case 5:	/* shr */
		emulate_2op_SrcB(ctxt, "shr");
		break;
	case 7:	/* sar */
		emulate_2op_SrcB(ctxt, "sar");
		break;
	}
	return X86EMUL_CONTINUE;
}
1682
/* NOT r/m: bitwise complement of the destination; flags are untouched. */
static int em_not(struct x86_emulate_ctxt *ctxt)
{
	ctxt->dst.val = ~ctxt->dst.val;
	return X86EMUL_CONTINUE;
}
1688
/* NEG r/m: two's-complement negate via the emulate_1op() asm helper. */
static int em_neg(struct x86_emulate_ctxt *ctxt)
{
	emulate_1op(ctxt, "neg");
	return X86EMUL_CONTINUE;
}
1694
/*
 * MUL r/m: unsigned multiply of rAX by the operand into rDX:rAX.
 * NOTE(review): unlike div/idiv, @ex is never checked here - presumably
 * because MUL cannot raise #DE; confirm against the asm helper.
 */
static int em_mul_ex(struct x86_emulate_ctxt *ctxt)
{
	u8 ex = 0;

	emulate_1op_rax_rdx(ctxt, "mul", ex);
	return X86EMUL_CONTINUE;
}
1702
/*
 * IMUL r/m (one-operand form): signed multiply of rAX into rDX:rAX.
 * NOTE(review): @ex is never checked here - presumably because this
 * form of IMUL cannot raise #DE; confirm against the asm helper.
 */
static int em_imul_ex(struct x86_emulate_ctxt *ctxt)
{
	u8 ex = 0;

	emulate_1op_rax_rdx(ctxt, "imul", ex);
	return X86EMUL_CONTINUE;
}
1710
1711static int em_div_ex(struct x86_emulate_ctxt *ctxt)
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02001712{
Avi Kivity34d1f492010-08-26 11:59:01 +03001713 u8 de = 0;
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02001714
Avi Kivity3329ece2011-09-13 10:45:39 +03001715 emulate_1op_rax_rdx(ctxt, "div", de);
1716 if (de)
1717 return emulate_de(ctxt);
1718 return X86EMUL_CONTINUE;
1719}
1720
1721static int em_idiv_ex(struct x86_emulate_ctxt *ctxt)
1722{
1723 u8 de = 0;
1724
1725 emulate_1op_rax_rdx(ctxt, "idiv", de);
Avi Kivity34d1f492010-08-26 11:59:01 +03001726 if (de)
1727 return emulate_de(ctxt);
Mohammed Gamal8c5eee32010-08-08 21:11:38 +03001728 return X86EMUL_CONTINUE;
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02001729}
1730
/*
 * Group 4/5 (0xfe/0xff): inc/dec/call/jmp/push, selected by ModRM /reg.
 * NOTE(review): /3 (call far) and /7 fall through and silently return
 * X86EMUL_CONTINUE - apparently unimplemented rather than invalid;
 * verify against the decode tables before relying on this.
 */
static int em_grp45(struct x86_emulate_ctxt *ctxt)
{
	int rc = X86EMUL_CONTINUE;

	switch (ctxt->modrm_reg) {
	case 0:	/* inc */
		emulate_1op(ctxt, "inc");
		break;
	case 1:	/* dec */
		emulate_1op(ctxt, "dec");
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = ctxt->_eip;
		/* Jump to the target, then push the old EIP as return address. */
		ctxt->_eip = ctxt->src.val;
		ctxt->src.val = old_eip;
		rc = em_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		ctxt->_eip = ctxt->src.val;
		break;
	case 5: /* jmp far */
		rc = em_jmp_far(ctxt);
		break;
	case 6:	/* push */
		rc = em_push(ctxt);
		break;
	}
	return rc;
}
1762
/*
 * Group 9 (0x0f 0xc7 /1): CMPXCHG8B.  Compare EDX:EAX with the 64-bit
 * destination (saved earlier in dst.orig_val64): on mismatch, load
 * EDX:EAX from the destination and clear ZF; on match, arrange for
 * ECX:EBX to be written back and set ZF.
 */
static int em_grp9(struct x86_emulate_ctxt *ctxt)
{
	u64 old = ctxt->dst.orig_val64;

	if (((u32) (old >> 0) != (u32) ctxt->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) ctxt->regs[VCPU_REGS_RDX])) {
		/* Mismatch: return the current memory value in EDX:EAX. */
		ctxt->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		ctxt->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;
	} else {
		/* Match: store ECX:EBX via the normal writeback path. */
		ctxt->dst.val64 = ((u64)ctxt->regs[VCPU_REGS_RCX] << 32) |
			(u32) ctxt->regs[VCPU_REGS_RBX];

		ctxt->eflags |= EFLG_ZF;
	}
	return X86EMUL_CONTINUE;
}
1780
Takuya Yoshikawaebda02c2011-05-29 22:00:22 +09001781static int em_ret(struct x86_emulate_ctxt *ctxt)
1782{
Avi Kivity9dac77f2011-06-01 15:34:25 +03001783 ctxt->dst.type = OP_REG;
1784 ctxt->dst.addr.reg = &ctxt->_eip;
1785 ctxt->dst.bytes = ctxt->op_bytes;
Takuya Yoshikawaebda02c2011-05-29 22:00:22 +09001786 return em_pop(ctxt);
1787}
1788
Takuya Yoshikawae01991e2011-05-29 21:55:10 +09001789static int em_ret_far(struct x86_emulate_ctxt *ctxt)
Avi Kivitya77ab5e2009-01-05 13:27:34 +02001790{
Avi Kivitya77ab5e2009-01-05 13:27:34 +02001791 int rc;
1792 unsigned long cs;
1793
Avi Kivity9dac77f2011-06-01 15:34:25 +03001794 rc = emulate_pop(ctxt, &ctxt->_eip, ctxt->op_bytes);
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09001795 if (rc != X86EMUL_CONTINUE)
Avi Kivitya77ab5e2009-01-05 13:27:34 +02001796 return rc;
Avi Kivity9dac77f2011-06-01 15:34:25 +03001797 if (ctxt->op_bytes == 4)
1798 ctxt->_eip = (u32)ctxt->_eip;
1799 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09001800 if (rc != X86EMUL_CONTINUE)
Avi Kivitya77ab5e2009-01-05 13:27:34 +02001801 return rc;
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001802 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS);
Avi Kivitya77ab5e2009-01-05 13:27:34 +02001803 return rc;
1804}
1805
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001806static int emulate_load_segment(struct x86_emulate_ctxt *ctxt, int seg)
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08001807{
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08001808 unsigned short sel;
1809 int rc;
1810
Avi Kivity9dac77f2011-06-01 15:34:25 +03001811 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08001812
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09001813 rc = load_segment_descriptor(ctxt, sel, seg);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08001814 if (rc != X86EMUL_CONTINUE)
1815 return rc;
1816
Avi Kivity9dac77f2011-06-01 15:34:25 +03001817 ctxt->dst.val = ctxt->src.val;
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08001818 return rc;
1819}
1820
/*
 * Initialize flat 4GB code and stack descriptors (@cs, @ss) as the
 * syscall/sysenter/sysexit family would install them.  The .l bit and
 * DPL of @cs are later adjusted by the callers for 64-bit/user entry.
 */
static void
setup_syscalls_segments(struct x86_emulate_ctxt *ctxt,
			struct desc_struct *cs, struct desc_struct *ss)
{
	u16 selector;

	memset(cs, 0, sizeof(struct desc_struct));
	/* selector read is only a side effect; cs is rebuilt below */
	ctxt->ops->get_segment(ctxt, &selector, cs, NULL, VCPU_SREG_CS);
	memset(ss, 0, sizeof(struct desc_struct));

	cs->l = 0;		/* will be adjusted later */
	set_desc_base(cs, 0);	/* flat segment */
	cs->g = 1;		/* 4kb granularity */
	set_desc_limit(cs, 0xfffff);	/* 4GB limit */
	cs->type = 0x0b;	/* Read, Execute, Accessed */
	cs->s = 1;
	cs->dpl = 0;		/* will be adjusted later */
	cs->p = 1;
	cs->d = 1;

	set_desc_base(ss, 0);	/* flat segment */
	set_desc_limit(ss, 0xfffff);	/* 4GB limit */
	ss->g = 1;		/* 4kb granularity */
	ss->s = 1;
	ss->type = 0x03;	/* Read/Write, Accessed */
	ss->d = 1;		/* 32bit stack segment */
	ss->dpl = 0;
	ss->p = 1;
}
1850
/*
 * SYSCALL: fast entry to kernel mode.  Loads CS/SS from MSR_STAR,
 * the target RIP from MSR_LSTAR/MSR_CSTAR (long mode) or MSR_STAR
 * (legacy), and masks EFLAGS per MSR_SYSCALL_MASK.  #UD in real and
 * VM86 modes.
 */
static int em_syscall(struct x86_emulate_ctxt *ctxt)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct desc_struct cs, ss;
	u64 msr_data;
	u16 cs_sel, ss_sel;
	u64 efer = 0;

	/* syscall is not available in real mode */
	if (ctxt->mode == X86EMUL_MODE_REAL ||
	    ctxt->mode == X86EMUL_MODE_VM86)
		return emulate_ud(ctxt);

	ops->get_msr(ctxt, MSR_EFER, &efer);
	setup_syscalls_segments(ctxt, &cs, &ss);

	/* Kernel CS selector lives in STAR[47:32]; SS is CS + 8. */
	ops->get_msr(ctxt, MSR_STAR, &msr_data);
	msr_data >>= 32;
	cs_sel = (u16)(msr_data & 0xfffc);
	ss_sel = (u16)(msr_data + 8);

	if (efer & EFER_LMA) {
		/* 64-bit code segment */
		cs.d = 0;
		cs.l = 1;
	}
	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);

	/* Return address is saved in RCX. */
	ctxt->regs[VCPU_REGS_RCX] = ctxt->_eip;
	if (efer & EFER_LMA) {
#ifdef CONFIG_X86_64
		/* Long mode also saves RFLAGS (minus RF) in R11. */
		ctxt->regs[VCPU_REGS_R11] = ctxt->eflags & ~EFLG_RF;

		ops->get_msr(ctxt,
			     ctxt->mode == X86EMUL_MODE_PROT64 ?
			     MSR_LSTAR : MSR_CSTAR, &msr_data);
		ctxt->_eip = msr_data;

		ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
		ctxt->eflags &= ~(msr_data | EFLG_RF);
#endif
	} else {
		/* legacy mode */
		ops->get_msr(ctxt, MSR_STAR, &msr_data);
		ctxt->_eip = (u32)msr_data;

		ctxt->eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
	}

	return X86EMUL_CONTINUE;
}
1902
/*
 * SYSENTER: fast entry to kernel mode using the SYSENTER MSRs.
 * CS comes from MSR_IA32_SYSENTER_CS (SS = CS + 8), RIP and RSP from
 * MSR_IA32_SYSENTER_EIP/ESP.  #GP in real mode, #UD in 64-bit mode
 * (untested there, see comment below).
 */
static int em_sysenter(struct x86_emulate_ctxt *ctxt)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct desc_struct cs, ss;
	u64 msr_data;
	u16 cs_sel, ss_sel;
	u64 efer = 0;

	ops->get_msr(ctxt, MSR_EFER, &efer);
	/* inject #GP if in real mode */
	if (ctxt->mode == X86EMUL_MODE_REAL)
		return emulate_gp(ctxt, 0);

	/* XXX sysenter/sysexit have not been tested in 64bit mode.
	 * Therefore, we inject an #UD.
	 */
	if (ctxt->mode == X86EMUL_MODE_PROT64)
		return emulate_ud(ctxt);

	setup_syscalls_segments(ctxt, &cs, &ss);

	/* A null SYSENTER_CS selector is a #GP condition. */
	ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
	switch (ctxt->mode) {
	case X86EMUL_MODE_PROT32:
		if ((msr_data & 0xfffc) == 0x0)
			return emulate_gp(ctxt, 0);
		break;
	case X86EMUL_MODE_PROT64:
		if (msr_data == 0x0)
			return emulate_gp(ctxt, 0);
		break;
	}

	ctxt->eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
	cs_sel = (u16)msr_data;
	cs_sel &= ~SELECTOR_RPL_MASK;
	ss_sel = cs_sel + 8;
	ss_sel &= ~SELECTOR_RPL_MASK;
	if (ctxt->mode == X86EMUL_MODE_PROT64 || (efer & EFER_LMA)) {
		/* 64-bit code segment */
		cs.d = 0;
		cs.l = 1;
	}

	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);

	ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
	ctxt->_eip = msr_data;

	ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
	ctxt->regs[VCPU_REGS_RSP] = msr_data;

	return X86EMUL_CONTINUE;
}
1957
/*
 * SYSEXIT: fast return to user mode.  User CS/SS selectors are derived
 * from MSR_IA32_SYSENTER_CS (+16/+24 for 32-bit, +32/+40 for 64-bit
 * return, REX.W selecting the latter); RIP comes from RDX and RSP from
 * RCX.  #GP in real and VM86 modes or on a null SYSENTER_CS.
 */
static int em_sysexit(struct x86_emulate_ctxt *ctxt)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct desc_struct cs, ss;
	u64 msr_data;
	int usermode;
	u16 cs_sel = 0, ss_sel = 0;

	/* inject #GP if in real mode or Virtual 8086 mode */
	if (ctxt->mode == X86EMUL_MODE_REAL ||
	    ctxt->mode == X86EMUL_MODE_VM86)
		return emulate_gp(ctxt, 0);

	setup_syscalls_segments(ctxt, &cs, &ss);

	/* REX.W selects a 64-bit return. */
	if ((ctxt->rex_prefix & 0x8) != 0x0)
		usermode = X86EMUL_MODE_PROT64;
	else
		usermode = X86EMUL_MODE_PROT32;

	cs.dpl = 3;
	ss.dpl = 3;
	ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
	switch (usermode) {
	case X86EMUL_MODE_PROT32:
		cs_sel = (u16)(msr_data + 16);
		if ((msr_data & 0xfffc) == 0x0)
			return emulate_gp(ctxt, 0);
		ss_sel = (u16)(msr_data + 24);
		break;
	case X86EMUL_MODE_PROT64:
		cs_sel = (u16)(msr_data + 32);
		if (msr_data == 0x0)
			return emulate_gp(ctxt, 0);
		ss_sel = cs_sel + 8;
		/* 64-bit code segment */
		cs.d = 0;
		cs.l = 1;
		break;
	}
	/* User segments run at RPL 3. */
	cs_sel |= SELECTOR_RPL_MASK;
	ss_sel |= SELECTOR_RPL_MASK;

	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);

	ctxt->_eip = ctxt->regs[VCPU_REGS_RDX];
	ctxt->regs[VCPU_REGS_RSP] = ctxt->regs[VCPU_REGS_RCX];

	return X86EMUL_CONTINUE;
}
2008
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09002009static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt)
Gleb Natapovf850e2e2010-02-10 14:21:33 +02002010{
2011 int iopl;
2012 if (ctxt->mode == X86EMUL_MODE_REAL)
2013 return false;
2014 if (ctxt->mode == X86EMUL_MODE_VM86)
2015 return true;
2016 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> IOPL_SHIFT;
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09002017 return ctxt->ops->cpl(ctxt) > iopl;
Gleb Natapovf850e2e2010-02-10 14:21:33 +02002018}
2019
/*
 * Consult the I/O permission bitmap in the TSS to decide whether the
 * guest may access @len bytes starting at @port.  Returns false on any
 * structural problem (TSS not present, limit too small, failed reads)
 * or when any of the relevant permission bits is set.
 */
static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt,
					    u16 port, u16 len)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct desc_struct tr_seg;
	u32 base3;
	int r;
	u16 tr, io_bitmap_ptr, perm, bit_idx = port & 0x7;
	unsigned mask = (1 << len) - 1;	/* one bit per byte of the access */
	unsigned long base;

	ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
	if (!tr_seg.p)
		return false;
	/* TSS must at least cover the I/O map base field at offset 102. */
	if (desc_limit_scaled(&tr_seg) < 103)
		return false;
	base = get_desc_base(&tr_seg);
#ifdef CONFIG_X86_64
	/* 64-bit TSS descriptors carry the high base bits separately. */
	base |= ((u64)base3) << 32;
#endif
	/* Offset 102 holds the 16-bit I/O bitmap offset within the TSS. */
	r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL);
	if (r != X86EMUL_CONTINUE)
		return false;
	if (io_bitmap_ptr + port/8 > desc_limit_scaled(&tr_seg))
		return false;
	/* Read two bytes so accesses straddling a byte boundary work. */
	r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL);
	if (r != X86EMUL_CONTINUE)
		return false;
	/* Any set bit in the range denies the access. */
	if ((perm >> bit_idx) & mask)
		return false;
	return true;
}
2052
/*
 * Top-level I/O permission check: skip the bitmap walk when the
 * privilege level allows direct access, and cache a positive result in
 * ctxt->perm_ok so repeated string-I/O iterations don't re-check.
 */
static bool emulator_io_permited(struct x86_emulate_ctxt *ctxt,
				 u16 port, u16 len)
{
	if (ctxt->perm_ok)
		return true;

	if (emulator_bad_iopl(ctxt))
		if (!emulator_io_port_access_allowed(ctxt, port, len))
			return false;

	ctxt->perm_ok = true;

	return true;
}
2067
/*
 * Snapshot the outgoing task's state into a 16-bit TSS image:
 * IP, FLAGS, the eight general-purpose registers and the segment
 * selectors (including LDTR).
 */
static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
				struct tss_segment_16 *tss)
{
	tss->ip = ctxt->_eip;
	tss->flag = ctxt->eflags;
	tss->ax = ctxt->regs[VCPU_REGS_RAX];
	tss->cx = ctxt->regs[VCPU_REGS_RCX];
	tss->dx = ctxt->regs[VCPU_REGS_RDX];
	tss->bx = ctxt->regs[VCPU_REGS_RBX];
	tss->sp = ctxt->regs[VCPU_REGS_RSP];
	tss->bp = ctxt->regs[VCPU_REGS_RBP];
	tss->si = ctxt->regs[VCPU_REGS_RSI];
	tss->di = ctxt->regs[VCPU_REGS_RDI];

	tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
	tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
	tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
	tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
	tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
}
2088
/*
 * Load the incoming task's state from a 16-bit TSS image.
 * Selectors are installed before descriptors are fetched, matching the
 * architectural ordering, so that a fault during descriptor loading is
 * delivered in the context of the new task.
 */
static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
				 struct tss_segment_16 *tss)
{
	int ret;

	ctxt->_eip = tss->ip;
	ctxt->eflags = tss->flag | 2;	/* bit 1 of EFLAGS is always set */
	ctxt->regs[VCPU_REGS_RAX] = tss->ax;
	ctxt->regs[VCPU_REGS_RCX] = tss->cx;
	ctxt->regs[VCPU_REGS_RDX] = tss->dx;
	ctxt->regs[VCPU_REGS_RBX] = tss->bx;
	ctxt->regs[VCPU_REGS_RSP] = tss->sp;
	ctxt->regs[VCPU_REGS_RBP] = tss->bp;
	ctxt->regs[VCPU_REGS_RSI] = tss->si;
	ctxt->regs[VCPU_REGS_RDI] = tss->di;

	/*
	 * SDM says that segment selectors are loaded before segment
	 * descriptors
	 */
	set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
	set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
	set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
	set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
	set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);

	/*
	 * Now load segment descriptors. If a fault happens at this stage
	 * it is handled in the context of the new task
	 */
	ret = load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS);
	if (ret != X86EMUL_CONTINUE)
		return ret;

	return X86EMUL_CONTINUE;
}
2137
/*
 * Perform a task switch through a 16-bit TSS: save current state into
 * the old TSS, read the new TSS, optionally link it back to the old
 * task (for CALL/gate switches), and load the new state.
 */
static int task_switch_16(struct x86_emulate_ctxt *ctxt,
			  u16 tss_selector, u16 old_tss_sel,
			  ulong old_tss_base, struct desc_struct *new_desc)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct tss_segment_16 tss_seg;
	int ret;
	u32 new_tss_base = get_desc_base(new_desc);

	ret = ops->read_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg,
			    &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	save_state_to_tss16(ctxt, &tss_seg);

	ret = ops->write_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg,
			     &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	ret = ops->read_std(ctxt, new_tss_base, &tss_seg, sizeof tss_seg,
			    &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	/* 0xffff means "no back link": see emulator_do_task_switch(). */
	if (old_tss_sel != 0xffff) {
		tss_seg.prev_task_link = old_tss_sel;

		ret = ops->write_std(ctxt, new_tss_base,
				     &tss_seg.prev_task_link,
				     sizeof tss_seg.prev_task_link,
				     &ctxt->exception);
		if (ret != X86EMUL_CONTINUE)
			/* FIXME: need to provide precise fault address */
			return ret;
	}

	return load_state_from_tss16(ctxt, &tss_seg);
}
2181
/*
 * Snapshot the outgoing task's state into a 32-bit TSS image:
 * CR3, EIP, EFLAGS, the general-purpose registers and all segment
 * selectors (including FS/GS and LDTR).
 */
static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
				struct tss_segment_32 *tss)
{
	tss->cr3 = ctxt->ops->get_cr(ctxt, 3);
	tss->eip = ctxt->_eip;
	tss->eflags = ctxt->eflags;
	tss->eax = ctxt->regs[VCPU_REGS_RAX];
	tss->ecx = ctxt->regs[VCPU_REGS_RCX];
	tss->edx = ctxt->regs[VCPU_REGS_RDX];
	tss->ebx = ctxt->regs[VCPU_REGS_RBX];
	tss->esp = ctxt->regs[VCPU_REGS_RSP];
	tss->ebp = ctxt->regs[VCPU_REGS_RBP];
	tss->esi = ctxt->regs[VCPU_REGS_RSI];
	tss->edi = ctxt->regs[VCPU_REGS_RDI];

	tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
	tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
	tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
	tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
	tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS);
	tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS);
	tss->ldt_selector = get_segment_selector(ctxt, VCPU_SREG_LDTR);
}
2205
/*
 * Load the incoming task's state from a 32-bit TSS image.  CR3 is
 * installed first (a rejected CR3 raises #GP(0)); selectors are then
 * set before descriptors are loaded, per the architectural ordering,
 * so faults during descriptor loading occur in the new task's context.
 */
static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
				 struct tss_segment_32 *tss)
{
	int ret;

	if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
		return emulate_gp(ctxt, 0);
	ctxt->_eip = tss->eip;
	ctxt->eflags = tss->eflags | 2;	/* bit 1 of EFLAGS is always set */
	ctxt->regs[VCPU_REGS_RAX] = tss->eax;
	ctxt->regs[VCPU_REGS_RCX] = tss->ecx;
	ctxt->regs[VCPU_REGS_RDX] = tss->edx;
	ctxt->regs[VCPU_REGS_RBX] = tss->ebx;
	ctxt->regs[VCPU_REGS_RSP] = tss->esp;
	ctxt->regs[VCPU_REGS_RBP] = tss->ebp;
	ctxt->regs[VCPU_REGS_RSI] = tss->esi;
	ctxt->regs[VCPU_REGS_RDI] = tss->edi;

	/*
	 * SDM says that segment selectors are loaded before segment
	 * descriptors
	 */
	set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
	set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
	set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
	set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
	set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
	set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
	set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);

	/*
	 * Now load segment descriptors. If a fault happens at this stage
	 * it is handled in the context of the new task
	 */
	ret = load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS);
	if (ret != X86EMUL_CONTINUE)
		return ret;

	return X86EMUL_CONTINUE;
}
2264
/*
 * Perform a task switch through a 32-bit TSS.  Mirrors
 * task_switch_16(): save old state, read the new TSS, optionally write
 * the back link, then load the new state.
 */
static int task_switch_32(struct x86_emulate_ctxt *ctxt,
			  u16 tss_selector, u16 old_tss_sel,
			  ulong old_tss_base, struct desc_struct *new_desc)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct tss_segment_32 tss_seg;
	int ret;
	u32 new_tss_base = get_desc_base(new_desc);

	ret = ops->read_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg,
			    &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	save_state_to_tss32(ctxt, &tss_seg);

	ret = ops->write_std(ctxt, old_tss_base, &tss_seg, sizeof tss_seg,
			     &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	ret = ops->read_std(ctxt, new_tss_base, &tss_seg, sizeof tss_seg,
			    &ctxt->exception);
	if (ret != X86EMUL_CONTINUE)
		/* FIXME: need to provide precise fault address */
		return ret;

	/* 0xffff means "no back link": see emulator_do_task_switch(). */
	if (old_tss_sel != 0xffff) {
		tss_seg.prev_task_link = old_tss_sel;

		ret = ops->write_std(ctxt, new_tss_base,
				     &tss_seg.prev_task_link,
				     sizeof tss_seg.prev_task_link,
				     &ctxt->exception);
		if (ret != X86EMUL_CONTINUE)
			/* FIXME: need to provide precise fault address */
			return ret;
	}

	return load_state_from_tss32(ctxt, &tss_seg);
}
2308
/*
 * Emulate a hardware task switch triggered by CALL, JMP, IRET, an
 * interrupt/task gate, or an exception.  Performs the privilege and
 * TSS-limit checks, manages the busy bit and NT flag, dispatches to the
 * 16/32-bit TSS switch helper, and finally pushes the error code on the
 * new task's stack when one accompanies the switch.
 */
static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
				   u16 tss_selector, int reason,
				   bool has_error_code, u32 error_code)
{
	struct x86_emulate_ops *ops = ctxt->ops;
	struct desc_struct curr_tss_desc, next_tss_desc;
	int ret;
	u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR);
	ulong old_tss_base =
		ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
	u32 desc_limit;

	/* FIXME: old_tss_base == ~0 ? */

	ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc);
	if (ret != X86EMUL_CONTINUE)
		return ret;
	ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc);
	if (ret != X86EMUL_CONTINUE)
		return ret;

	/* FIXME: check that next_tss_desc is tss */

	/* IRET is allowed regardless of privilege; others must pass DPL checks. */
	if (reason != TASK_SWITCH_IRET) {
		if ((tss_selector & 3) > next_tss_desc.dpl ||
		    ops->cpl(ctxt) > next_tss_desc.dpl)
			return emulate_gp(ctxt, 0);
	}

	/* The new TSS must be present and large enough (0x67 for 32-bit, 0x2b for 16-bit). */
	desc_limit = desc_limit_scaled(&next_tss_desc);
	if (!next_tss_desc.p ||
	    ((desc_limit < 0x67 && (next_tss_desc.type & 8)) ||
	     desc_limit < 0x2b)) {
		emulate_ts(ctxt, tss_selector & 0xfffc);
		return X86EMUL_PROPAGATE_FAULT;
	}

	if (reason == TASK_SWITCH_IRET || reason == TASK_SWITCH_JMP) {
		curr_tss_desc.type &= ~(1 << 1); /* clear busy flag */
		write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc);
	}

	if (reason == TASK_SWITCH_IRET)
		ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT;

	/* set back link to prev task only if NT bit is set in eflags
	   note that old_tss_sel is not used after this point */
	if (reason != TASK_SWITCH_CALL && reason != TASK_SWITCH_GATE)
		old_tss_sel = 0xffff;

	/* Descriptor type bit 3 distinguishes a 32-bit TSS from a 16-bit one. */
	if (next_tss_desc.type & 8)
		ret = task_switch_32(ctxt, tss_selector, old_tss_sel,
				     old_tss_base, &next_tss_desc);
	else
		ret = task_switch_16(ctxt, tss_selector, old_tss_sel,
				     old_tss_base, &next_tss_desc);
	if (ret != X86EMUL_CONTINUE)
		return ret;

	if (reason == TASK_SWITCH_CALL || reason == TASK_SWITCH_GATE)
		ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT;

	if (reason != TASK_SWITCH_IRET) {
		next_tss_desc.type |= (1 << 1); /* set busy flag */
		write_segment_descriptor(ctxt, tss_selector, &next_tss_desc);
	}

	ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
	ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);

	/* Push the error code on the new task's stack, sized by its TSS type. */
	if (has_error_code) {
		ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2;
		ctxt->lock_prefix = 0;
		ctxt->src.val = (unsigned long) error_code;
		ret = em_push(ctxt);
	}

	return ret;
}
2388
/*
 * Public entry point for task-switch emulation.  Works on the shadow
 * instruction pointer (_eip) and commits it to eip only on success;
 * translates the internal result into EMULATION_OK / EMULATION_FAILED.
 */
int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
			 u16 tss_selector, int reason,
			 bool has_error_code, u32 error_code)
{
	int rc;

	ctxt->_eip = ctxt->eip;
	ctxt->dst.type = OP_NONE;

	rc = emulator_do_task_switch(ctxt, tss_selector, reason,
				     has_error_code, error_code);

	if (rc == X86EMUL_CONTINUE)
		ctxt->eip = ctxt->_eip;

	return (rc == X86EMUL_UNHANDLEABLE) ? EMULATION_FAILED : EMULATION_OK;
}
2406
/*
 * Advance a string-instruction operand: step the index register by the
 * operand size (backwards when EFLAGS.DF is set) and refresh the
 * operand's effective address and segment.
 */
static void string_addr_inc(struct x86_emulate_ctxt *ctxt, unsigned seg,
			    int reg, struct operand *op)
{
	int df = (ctxt->eflags & EFLG_DF) ? -1 : 1;

	register_address_increment(ctxt, &ctxt->regs[reg], df * op->bytes);
	op->addr.mem.ea = register_address(ctxt, ctxt->regs[reg]);
	op->addr.mem.seg = seg;
}
2416
/*
 * Emulate DAS (decimal adjust AL after subtraction).  Adjusts AL for
 * packed BCD and recomputes CF/AF explicitly; PF/ZF/SF are derived by
 * OR-ing AL with zero through the flag-computing macro.
 */
static int em_das(struct x86_emulate_ctxt *ctxt)
{
	u8 al, old_al;
	bool af, cf, old_cf;

	cf = ctxt->eflags & X86_EFLAGS_CF;
	al = ctxt->dst.val;

	old_al = al;
	old_cf = cf;
	cf = false;
	af = ctxt->eflags & X86_EFLAGS_AF;
	if ((al & 0x0f) > 9 || af) {
		al -= 6;
		/* al >= 250 detects the u8 borrow from the subtraction above */
		cf = old_cf | (al >= 250);
		af = true;
	} else {
		af = false;
	}
	if (old_al > 0x99 || old_cf) {
		al -= 0x60;
		cf = true;
	}

	ctxt->dst.val = al;
	/* Set PF, ZF, SF */
	ctxt->src.type = OP_IMM;
	ctxt->src.val = 0;
	ctxt->src.bytes = 1;
	emulate_2op_SrcV(ctxt, "or");
	ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF);
	if (cf)
		ctxt->eflags |= X86_EFLAGS_CF;
	if (af)
		ctxt->eflags |= X86_EFLAGS_AF;
	return X86EMUL_CONTINUE;
}
2454
Avi Kivity0ef753b2010-08-18 14:51:45 +03002455static int em_call_far(struct x86_emulate_ctxt *ctxt)
2456{
Avi Kivity0ef753b2010-08-18 14:51:45 +03002457 u16 sel, old_cs;
2458 ulong old_eip;
2459 int rc;
2460
Avi Kivity1aa36612011-04-27 13:20:30 +03002461 old_cs = get_segment_selector(ctxt, VCPU_SREG_CS);
Avi Kivity9dac77f2011-06-01 15:34:25 +03002462 old_eip = ctxt->_eip;
Avi Kivity0ef753b2010-08-18 14:51:45 +03002463
Avi Kivity9dac77f2011-06-01 15:34:25 +03002464 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09002465 if (load_segment_descriptor(ctxt, sel, VCPU_SREG_CS))
Avi Kivity0ef753b2010-08-18 14:51:45 +03002466 return X86EMUL_CONTINUE;
2467
Avi Kivity9dac77f2011-06-01 15:34:25 +03002468 ctxt->_eip = 0;
2469 memcpy(&ctxt->_eip, ctxt->src.valptr, ctxt->op_bytes);
Avi Kivity0ef753b2010-08-18 14:51:45 +03002470
Avi Kivity9dac77f2011-06-01 15:34:25 +03002471 ctxt->src.val = old_cs;
Takuya Yoshikawa4487b3b2011-04-13 00:31:23 +09002472 rc = em_push(ctxt);
Avi Kivity0ef753b2010-08-18 14:51:45 +03002473 if (rc != X86EMUL_CONTINUE)
2474 return rc;
2475
Avi Kivity9dac77f2011-06-01 15:34:25 +03002476 ctxt->src.val = old_eip;
Takuya Yoshikawa4487b3b2011-04-13 00:31:23 +09002477 return em_push(ctxt);
Avi Kivity0ef753b2010-08-18 14:51:45 +03002478}
2479
/*
 * Emulate RET imm16 (near return): pop the return EIP, then release an
 * additional imm16 bytes (ctxt->src.val) from the stack.
 */
static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt)
{
	int rc;

	ctxt->dst.type = OP_REG;
	ctxt->dst.addr.reg = &ctxt->_eip;
	ctxt->dst.bytes = ctxt->op_bytes;
	rc = emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RSP], ctxt->src.val);
	return X86EMUL_CONTINUE;
}
2493
/* ADD: src + dst -> dst, flags updated by the macro. */
static int em_add(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "add");
	return X86EMUL_CONTINUE;
}
2499
/* OR: src | dst -> dst, flags updated by the macro. */
static int em_or(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "or");
	return X86EMUL_CONTINUE;
}
2505
/* ADC: add with carry-in from EFLAGS.CF. */
static int em_adc(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "adc");
	return X86EMUL_CONTINUE;
}
2511
/* SBB: subtract with borrow-in from EFLAGS.CF. */
static int em_sbb(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "sbb");
	return X86EMUL_CONTINUE;
}
2517
/* AND: src & dst -> dst, flags updated by the macro. */
static int em_and(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "and");
	return X86EMUL_CONTINUE;
}
2523
/* SUB: dst - src -> dst, flags updated by the macro. */
static int em_sub(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "sub");
	return X86EMUL_CONTINUE;
}
2529
/* XOR: src ^ dst -> dst, flags updated by the macro. */
static int em_xor(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "xor");
	return X86EMUL_CONTINUE;
}
2535
/* CMP: compute dst - src for flags only; the destination is not written. */
static int em_cmp(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "cmp");
	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return X86EMUL_CONTINUE;
}
2543
/* TEST: compute dst & src for flags only; the destination is not written. */
static int em_test(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV(ctxt, "test");
	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return X86EMUL_CONTINUE;
}
2551
/*
 * XCHG: swap src and dst.  The register side is written back here
 * directly; the memory side goes through the normal writeback path with
 * an implicit LOCK prefix, as the architecture requires.
 */
static int em_xchg(struct x86_emulate_ctxt *ctxt)
{
	/* Write back the register source. */
	ctxt->src.val = ctxt->dst.val;
	write_register_operand(&ctxt->src);

	/* Write back the memory destination with implicit LOCK prefix. */
	ctxt->dst.val = ctxt->src.orig_val;
	ctxt->lock_prefix = 1;
	return X86EMUL_CONTINUE;
}
2563
/* Two-operand IMUL: dst * src -> dst (word/dword/qword only). */
static int em_imul(struct x86_emulate_ctxt *ctxt)
{
	emulate_2op_SrcV_nobyte(ctxt, "imul");
	return X86EMUL_CONTINUE;
}
2569
/* Three-operand IMUL (reg, r/m, imm): seed dst with the immediate, then multiply. */
static int em_imul_3op(struct x86_emulate_ctxt *ctxt)
{
	ctxt->dst.val = ctxt->src2.val;
	return em_imul(ctxt);
}
2575
/*
 * CWD/CDQ/CQO: sign-extend the accumulator into rDX by replicating the
 * source's top bit across the destination.
 */
static int em_cwd(struct x86_emulate_ctxt *ctxt)
{
	ctxt->dst.type = OP_REG;
	ctxt->dst.bytes = ctxt->src.bytes;
	ctxt->dst.addr.reg = &ctxt->regs[VCPU_REGS_RDX];
	/* All-ones if the sign bit of src is set, all-zeros otherwise. */
	ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1);

	return X86EMUL_CONTINUE;
}
2585
/* RDTSC: read the TSC MSR and split it across EDX:EAX. */
static int em_rdtsc(struct x86_emulate_ctxt *ctxt)
{
	u64 tsc = 0;

	ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
	ctxt->regs[VCPU_REGS_RAX] = (u32)tsc;
	ctxt->regs[VCPU_REGS_RDX] = tsc >> 32;
	return X86EMUL_CONTINUE;
}
2595
/* MOV: copy src to dst; actual writeback happens in the common path. */
static int em_mov(struct x86_emulate_ctxt *ctxt)
{
	ctxt->dst.val = ctxt->src.val;
	return X86EMUL_CONTINUE;
}
2601
/* MOV r/m, Sreg: read a segment selector; invalid segment index -> #UD. */
static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt)
{
	if (ctxt->modrm_reg > VCPU_SREG_GS)
		return emulate_ud(ctxt);

	ctxt->dst.val = get_segment_selector(ctxt, ctxt->modrm_reg);
	return X86EMUL_CONTINUE;
}
2610
/*
 * MOV Sreg, r/m: load a segment register from a selector.  Loading CS
 * this way (or an invalid segment index) raises #UD; loading SS sets
 * the MOV-SS interrupt shadow.
 */
static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt)
{
	u16 sel = ctxt->src.val;

	if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS)
		return emulate_ud(ctxt);

	if (ctxt->modrm_reg == VCPU_SREG_SS)
		ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;

	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg);
}
2625
/* MOVDQU: copy the vector operand (op_bytes wide) from src to dst. */
static int em_movdqu(struct x86_emulate_ctxt *ctxt)
{
	memcpy(&ctxt->dst.vec_val, &ctxt->src.vec_val, ctxt->op_bytes);
	return X86EMUL_CONTINUE;
}
2631
/*
 * INVLPG: linearize the memory operand and invalidate its TLB entry.
 * A linearization fault simply skips the invalidation; no destination
 * is written either way.
 */
static int em_invlpg(struct x86_emulate_ctxt *ctxt)
{
	int rc;
	ulong linear;

	rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear);
	if (rc == X86EMUL_CONTINUE)
		ctxt->ops->invlpg(ctxt, linear);
	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return X86EMUL_CONTINUE;
}
2644
Avi Kivity2d04a052011-04-20 15:32:49 +03002645static int em_clts(struct x86_emulate_ctxt *ctxt)
2646{
2647 ulong cr0;
2648
2649 cr0 = ctxt->ops->get_cr(ctxt, 0);
2650 cr0 &= ~X86_CR0_TS;
2651 ctxt->ops->set_cr(ctxt, 0, cr0);
2652 return X86EMUL_CONTINUE;
2653}
2654
Avi Kivity26d05cc2011-04-21 12:07:59 +03002655static int em_vmcall(struct x86_emulate_ctxt *ctxt)
2656{
Avi Kivity26d05cc2011-04-21 12:07:59 +03002657 int rc;
2658
Avi Kivity9dac77f2011-06-01 15:34:25 +03002659 if (ctxt->modrm_mod != 3 || ctxt->modrm_rm != 1)
Avi Kivity26d05cc2011-04-21 12:07:59 +03002660 return X86EMUL_UNHANDLEABLE;
2661
2662 rc = ctxt->ops->fix_hypercall(ctxt);
2663 if (rc != X86EMUL_CONTINUE)
2664 return rc;
2665
2666 /* Let the processor re-execute the fixed hypercall */
Avi Kivity9dac77f2011-06-01 15:34:25 +03002667 ctxt->_eip = ctxt->eip;
Avi Kivity26d05cc2011-04-21 12:07:59 +03002668 /* Disable writeback. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03002669 ctxt->dst.type = OP_NONE;
Avi Kivity26d05cc2011-04-21 12:07:59 +03002670 return X86EMUL_CONTINUE;
2671}
2672
/*
 * LGDT: read a limit/base pair from the memory operand (width governed
 * by op_bytes) and load it into the guest's GDTR.
 */
static int em_lgdt(struct x86_emulate_ctxt *ctxt)
{
	struct desc_ptr desc_ptr;
	int rc;

	rc = read_descriptor(ctxt, ctxt->src.addr.mem,
			     &desc_ptr.size, &desc_ptr.address,
			     ctxt->op_bytes);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	ctxt->ops->set_gdt(ctxt, &desc_ptr);
	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return X86EMUL_CONTINUE;
}
2688
Avi Kivity5ef39c72011-04-21 12:21:50 +03002689static int em_vmmcall(struct x86_emulate_ctxt *ctxt)
Avi Kivity26d05cc2011-04-21 12:07:59 +03002690{
Avi Kivity26d05cc2011-04-21 12:07:59 +03002691 int rc;
2692
Avi Kivity5ef39c72011-04-21 12:21:50 +03002693 rc = ctxt->ops->fix_hypercall(ctxt);
2694
Avi Kivity26d05cc2011-04-21 12:07:59 +03002695 /* Disable writeback. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03002696 ctxt->dst.type = OP_NONE;
Avi Kivity26d05cc2011-04-21 12:07:59 +03002697 return rc;
2698}
2699
/*
 * LIDT: read a limit/base pair from the memory operand (width governed
 * by op_bytes) and load it into the guest's IDTR.  Mirrors em_lgdt().
 */
static int em_lidt(struct x86_emulate_ctxt *ctxt)
{
	struct desc_ptr desc_ptr;
	int rc;

	rc = read_descriptor(ctxt, ctxt->src.addr.mem,
			     &desc_ptr.size, &desc_ptr.address,
			     ctxt->op_bytes);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	ctxt->ops->set_idt(ctxt, &desc_ptr);
	/* Disable writeback. */
	ctxt->dst.type = OP_NONE;
	return X86EMUL_CONTINUE;
}
2715
/*
 * SMSW: store the machine status word (low bits of CR0).
 * NOTE(review): dst.bytes is forced to 2 even when the destination is a
 * register, where real CPUs store the full operand size — confirm this
 * is intended before relying on it.
 */
static int em_smsw(struct x86_emulate_ctxt *ctxt)
{
	ctxt->dst.bytes = 2;
	ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
	return X86EMUL_CONTINUE;
}
2722
2723static int em_lmsw(struct x86_emulate_ctxt *ctxt)
2724{
Avi Kivity26d05cc2011-04-21 12:07:59 +03002725 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
Avi Kivity9dac77f2011-06-01 15:34:25 +03002726 | (ctxt->src.val & 0x0f));
2727 ctxt->dst.type = OP_NONE;
Avi Kivity26d05cc2011-04-21 12:07:59 +03002728 return X86EMUL_CONTINUE;
2729}
2730
Takuya Yoshikawad06e03a2011-05-29 22:04:08 +09002731static int em_loop(struct x86_emulate_ctxt *ctxt)
2732{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002733 register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RCX], -1);
2734 if ((address_mask(ctxt, ctxt->regs[VCPU_REGS_RCX]) != 0) &&
2735 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags)))
2736 jmp_rel(ctxt, ctxt->src.val);
Takuya Yoshikawad06e03a2011-05-29 22:04:08 +09002737
2738 return X86EMUL_CONTINUE;
2739}
2740
2741static int em_jcxz(struct x86_emulate_ctxt *ctxt)
2742{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002743 if (address_mask(ctxt, ctxt->regs[VCPU_REGS_RCX]) == 0)
2744 jmp_rel(ctxt, ctxt->src.val);
Takuya Yoshikawad06e03a2011-05-29 22:04:08 +09002745
2746 return X86EMUL_CONTINUE;
2747}
2748
Takuya Yoshikawaf411e6c2011-05-29 22:05:15 +09002749static int em_cli(struct x86_emulate_ctxt *ctxt)
2750{
2751 if (emulator_bad_iopl(ctxt))
2752 return emulate_gp(ctxt, 0);
2753
2754 ctxt->eflags &= ~X86_EFLAGS_IF;
2755 return X86EMUL_CONTINUE;
2756}
2757
2758static int em_sti(struct x86_emulate_ctxt *ctxt)
2759{
2760 if (emulator_bad_iopl(ctxt))
2761 return emulate_gp(ctxt, 0);
2762
2763 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI;
2764 ctxt->eflags |= X86_EFLAGS_IF;
2765 return X86EMUL_CONTINUE;
2766}
2767
Joerg Roedelcfec82c2011-04-04 12:39:28 +02002768static bool valid_cr(int nr)
2769{
2770 switch (nr) {
2771 case 0:
2772 case 2 ... 4:
2773 case 8:
2774 return true;
2775 default:
2776 return false;
2777 }
2778}
2779
2780static int check_cr_read(struct x86_emulate_ctxt *ctxt)
2781{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002782 if (!valid_cr(ctxt->modrm_reg))
Joerg Roedelcfec82c2011-04-04 12:39:28 +02002783 return emulate_ud(ctxt);
2784
2785 return X86EMUL_CONTINUE;
2786}
2787
/*
 * MOV to CR: validate the new value before letting the write through.
 * #UD for a nonexistent CR, #GP(0) for reserved bits or inconsistent
 * mode combinations.
 */
static int check_cr_write(struct x86_emulate_ctxt *ctxt)
{
	u64 new_val = ctxt->src.val64;
	int cr = ctxt->modrm_reg;
	u64 efer = 0;

	/* Per-CR reserved-bit masks, indexed by CR number. */
	static u64 cr_reserved_bits[] = {
		0xffffffff00000000ULL,
		0, 0, 0, /* CR3 checked later */
		CR4_RESERVED_BITS,
		0, 0, 0,
		CR8_RESERVED_BITS,
	};

	if (!valid_cr(cr))
		return emulate_ud(ctxt);

	if (new_val & cr_reserved_bits[cr])
		return emulate_gp(ctxt, 0);

	switch (cr) {
	case 0: {
		u64 cr4;
		/* Paging requires protected mode; NW requires CD. */
		if (((new_val & X86_CR0_PG) && !(new_val & X86_CR0_PE)) ||
		    ((new_val & X86_CR0_NW) && !(new_val & X86_CR0_CD)))
			return emulate_gp(ctxt, 0);

		cr4 = ctxt->ops->get_cr(ctxt, 4);
		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);

		/* Enabling paging in long mode demands CR4.PAE. */
		if ((new_val & X86_CR0_PG) && (efer & EFER_LME) &&
		    !(cr4 & X86_CR4_PAE))
			return emulate_gp(ctxt, 0);

		break;
		}
	case 3: {
		/* CR3 reserved bits depend on the active paging mode. */
		u64 rsvd = 0;

		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
		if (efer & EFER_LMA)
			rsvd = CR3_L_MODE_RESERVED_BITS;
		else if (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_PAE)
			rsvd = CR3_PAE_RESERVED_BITS;
		else if (ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PG)
			rsvd = CR3_NONPAE_RESERVED_BITS;

		if (new_val & rsvd)
			return emulate_gp(ctxt, 0);

		break;
		}
	case 4: {
		u64 cr4;

		cr4 = ctxt->ops->get_cr(ctxt, 4);
		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);

		/* PAE may not be cleared while long mode is active. */
		if ((efer & EFER_LMA) && !(new_val & X86_CR4_PAE))
			return emulate_gp(ctxt, 0);

		break;
		}
	}

	return X86EMUL_CONTINUE;
}
2855
Joerg Roedel3b88e412011-04-04 12:39:29 +02002856static int check_dr7_gd(struct x86_emulate_ctxt *ctxt)
2857{
2858 unsigned long dr7;
2859
Avi Kivity717746e2011-04-20 13:37:53 +03002860 ctxt->ops->get_dr(ctxt, 7, &dr7);
Joerg Roedel3b88e412011-04-04 12:39:29 +02002861
2862 /* Check if DR7.Global_Enable is set */
2863 return dr7 & (1 << 13);
2864}
2865
/*
 * MOV from DR: #UD for a nonexistent register or for DR4/DR5 when
 * CR4.DE disables their aliasing to DR6/DR7; #DB when DR7.GD is set.
 */
static int check_dr_read(struct x86_emulate_ctxt *ctxt)
{
	int dr = ctxt->modrm_reg;
	u64 cr4;

	if (dr > 7)
		return emulate_ud(ctxt);

	/* With CR4.DE set, DR4/DR5 no longer alias DR6/DR7. */
	cr4 = ctxt->ops->get_cr(ctxt, 4);
	if ((cr4 & X86_CR4_DE) && (dr == 4 || dr == 5))
		return emulate_ud(ctxt);

	if (check_dr7_gd(ctxt))
		return emulate_db(ctxt);

	return X86EMUL_CONTINUE;
}
2883
2884static int check_dr_write(struct x86_emulate_ctxt *ctxt)
2885{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002886 u64 new_val = ctxt->src.val64;
2887 int dr = ctxt->modrm_reg;
Joerg Roedel3b88e412011-04-04 12:39:29 +02002888
2889 if ((dr == 6 || dr == 7) && (new_val & 0xffffffff00000000ULL))
2890 return emulate_gp(ctxt, 0);
2891
2892 return check_dr_read(ctxt);
2893}
2894
Joerg Roedel01de8b02011-04-04 12:39:31 +02002895static int check_svme(struct x86_emulate_ctxt *ctxt)
2896{
2897 u64 efer;
2898
Avi Kivity717746e2011-04-20 13:37:53 +03002899 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
Joerg Roedel01de8b02011-04-04 12:39:31 +02002900
2901 if (!(efer & EFER_SVME))
2902 return emulate_ud(ctxt);
2903
2904 return X86EMUL_CONTINUE;
2905}
2906
2907static int check_svme_pa(struct x86_emulate_ctxt *ctxt)
2908{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002909 u64 rax = ctxt->regs[VCPU_REGS_RAX];
Joerg Roedel01de8b02011-04-04 12:39:31 +02002910
2911 /* Valid physical address? */
Randy Dunlapd4224442011-04-21 09:09:22 -07002912 if (rax & 0xffff000000000000ULL)
Joerg Roedel01de8b02011-04-04 12:39:31 +02002913 return emulate_gp(ctxt, 0);
2914
2915 return check_svme(ctxt);
2916}
2917
Joerg Roedeld7eb8202011-04-04 12:39:32 +02002918static int check_rdtsc(struct x86_emulate_ctxt *ctxt)
2919{
Avi Kivity717746e2011-04-20 13:37:53 +03002920 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
Joerg Roedeld7eb8202011-04-04 12:39:32 +02002921
Avi Kivity717746e2011-04-20 13:37:53 +03002922 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
Joerg Roedeld7eb8202011-04-04 12:39:32 +02002923 return emulate_ud(ctxt);
2924
2925 return X86EMUL_CONTINUE;
2926}
2927
Joerg Roedel80612522011-04-04 12:39:33 +02002928static int check_rdpmc(struct x86_emulate_ctxt *ctxt)
2929{
Avi Kivity717746e2011-04-20 13:37:53 +03002930 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
Avi Kivity9dac77f2011-06-01 15:34:25 +03002931 u64 rcx = ctxt->regs[VCPU_REGS_RCX];
Joerg Roedel80612522011-04-04 12:39:33 +02002932
Avi Kivity717746e2011-04-20 13:37:53 +03002933 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
Joerg Roedel80612522011-04-04 12:39:33 +02002934 (rcx > 3))
2935 return emulate_gp(ctxt, 0);
2936
2937 return X86EMUL_CONTINUE;
2938}
2939
Joerg Roedelf6511932011-04-04 12:39:35 +02002940static int check_perm_in(struct x86_emulate_ctxt *ctxt)
2941{
Avi Kivity9dac77f2011-06-01 15:34:25 +03002942 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u);
2943 if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes))
Joerg Roedelf6511932011-04-04 12:39:35 +02002944 return emulate_gp(ctxt, 0);
2945
2946 return X86EMUL_CONTINUE;
2947}
2948
/*
 * OUT/OUTS permission check: mirror of check_perm_in() with the port in
 * the destination and the data width taken from the source.
 */
static int check_perm_out(struct x86_emulate_ctxt *ctxt)
{
	ctxt->src.bytes = min(ctxt->src.bytes, 4u);
	if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes))
		return emulate_gp(ctxt, 0);

	return X86EMUL_CONTINUE;
}
2957
Avi Kivity73fba5f2010-07-29 15:11:53 +03002958#define D(_y) { .flags = (_y) }
Avi Kivityc4f035c2011-04-04 12:39:22 +02002959#define DI(_y, _i) { .flags = (_y), .intercept = x86_intercept_##_i }
Joerg Roedeld09beab2011-04-04 12:39:25 +02002960#define DIP(_y, _i, _p) { .flags = (_y), .intercept = x86_intercept_##_i, \
2961 .check_perm = (_p) }
Avi Kivity73fba5f2010-07-29 15:11:53 +03002962#define N D(0)
Joerg Roedel01de8b02011-04-04 12:39:31 +02002963#define EXT(_f, _e) { .flags = ((_f) | RMExt), .u.group = (_e) }
Avi Kivity73fba5f2010-07-29 15:11:53 +03002964#define G(_f, _g) { .flags = ((_f) | Group), .u.group = (_g) }
Avi Kivity46561642011-04-24 14:09:59 +03002965#define GD(_f, _g) { .flags = ((_f) | GroupDual), .u.gdual = (_g) }
Avi Kivity73fba5f2010-07-29 15:11:53 +03002966#define I(_f, _e) { .flags = (_f), .u.execute = (_e) }
Avi Kivityc4f035c2011-04-04 12:39:22 +02002967#define II(_f, _e, _i) \
2968 { .flags = (_f), .u.execute = (_e), .intercept = x86_intercept_##_i }
Joerg Roedeld09beab2011-04-04 12:39:25 +02002969#define IIP(_f, _e, _i, _p) \
2970 { .flags = (_f), .u.execute = (_e), .intercept = x86_intercept_##_i, \
2971 .check_perm = (_p) }
Avi Kivityaa97bb42010-01-20 18:09:23 +02002972#define GP(_f, _g) { .flags = ((_f) | Prefix), .u.gprefix = (_g) }
Avi Kivity73fba5f2010-07-29 15:11:53 +03002973
Avi Kivity8d8f4e92010-08-26 11:56:06 +03002974#define D2bv(_f) D((_f) | ByteOp), D(_f)
Joerg Roedelf6511932011-04-04 12:39:35 +02002975#define D2bvIP(_f, _i, _p) DIP((_f) | ByteOp, _i, _p), DIP(_f, _i, _p)
Avi Kivity8d8f4e92010-08-26 11:56:06 +03002976#define I2bv(_f, _e) I((_f) | ByteOp, _e), I(_f, _e)
2977
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09002978#define I6ALU(_f, _e) I2bv((_f) | DstMem | SrcReg | ModRM, _e), \
2979 I2bv(((_f) | DstReg | SrcMem | ModRM) & ~Lock, _e), \
2980 I2bv(((_f) & ~Lock) | DstAcc | SrcImm, _e)
Avi Kivity6230f7f2010-08-26 18:34:55 +03002981
Joerg Roedeld7eb8202011-04-04 12:39:32 +02002982static struct opcode group7_rm1[] = {
2983 DI(SrcNone | ModRM | Priv, monitor),
2984 DI(SrcNone | ModRM | Priv, mwait),
2985 N, N, N, N, N, N,
2986};
2987
Joerg Roedel01de8b02011-04-04 12:39:31 +02002988static struct opcode group7_rm3[] = {
2989 DIP(SrcNone | ModRM | Prot | Priv, vmrun, check_svme_pa),
Avi Kivity5ef39c72011-04-21 12:21:50 +03002990 II(SrcNone | ModRM | Prot | VendorSpecific, em_vmmcall, vmmcall),
Joerg Roedel01de8b02011-04-04 12:39:31 +02002991 DIP(SrcNone | ModRM | Prot | Priv, vmload, check_svme_pa),
2992 DIP(SrcNone | ModRM | Prot | Priv, vmsave, check_svme_pa),
2993 DIP(SrcNone | ModRM | Prot | Priv, stgi, check_svme),
2994 DIP(SrcNone | ModRM | Prot | Priv, clgi, check_svme),
2995 DIP(SrcNone | ModRM | Prot | Priv, skinit, check_svme),
2996 DIP(SrcNone | ModRM | Prot | Priv, invlpga, check_svme),
2997};
Avi Kivity6230f7f2010-08-26 18:34:55 +03002998
Joerg Roedeld7eb8202011-04-04 12:39:32 +02002999static struct opcode group7_rm7[] = {
3000 N,
3001 DIP(SrcNone | ModRM, rdtscp, check_rdtsc),
3002 N, N, N, N, N, N,
3003};
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003004
Avi Kivity73fba5f2010-07-29 15:11:53 +03003005static struct opcode group1[] = {
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003006 I(Lock, em_add),
3007 I(Lock, em_or),
3008 I(Lock, em_adc),
3009 I(Lock, em_sbb),
3010 I(Lock, em_and),
3011 I(Lock, em_sub),
3012 I(Lock, em_xor),
3013 I(0, em_cmp),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003014};
3015
/* Group 1A (0x8f): POP r/m at reg=0; other reg values are undefined. */
static struct opcode group1A[] = {
	D(DstMem | SrcNone | ModRM | Mov | Stack), N, N, N, N, N, N, N,
};
3019
/*
 * Group 3 (0xf6/0xf7): TEST/NOT/NEG/MUL/IMUL/DIV/IDIV.  reg=1 carries
 * the same TEST entry as reg=0 (the undocumented alias encoding).
 */
static struct opcode group3[] = {
	I(DstMem | SrcImm | ModRM, em_test),
	I(DstMem | SrcImm | ModRM, em_test),
	I(DstMem | SrcNone | ModRM | Lock, em_not),
	I(DstMem | SrcNone | ModRM | Lock, em_neg),
	I(SrcMem | ModRM, em_mul_ex),
	I(SrcMem | ModRM, em_imul_ex),
	I(SrcMem | ModRM, em_div_ex),
	I(SrcMem | ModRM, em_idiv_ex),
};
3030
/* Group 4 (0xfe): byte INC (reg=0) and DEC (reg=1). */
static struct opcode group4[] = {
	D(ByteOp | DstMem | SrcNone | ModRM | Lock), D(ByteOp | DstMem | SrcNone | ModRM | Lock),
	N, N, N, N, N, N,
};
3035
/* Group 5 (0xff): INC, DEC, CALL near/far, JMP near/far, PUSH. */
static struct opcode group5[] = {
	D(DstMem | SrcNone | ModRM | Lock), D(DstMem | SrcNone | ModRM | Lock),
	D(SrcMem | ModRM | Stack),
	I(SrcMemFAddr | ModRM | ImplicitOps | Stack, em_call_far),
	D(SrcMem | ModRM | Stack), D(SrcMemFAddr | ModRM | ImplicitOps),
	D(SrcMem | ModRM | Stack), N,
};
3043
Joerg Roedeldee6bb72011-04-04 12:39:30 +02003044static struct opcode group6[] = {
3045 DI(ModRM | Prot, sldt),
3046 DI(ModRM | Prot, str),
3047 DI(ModRM | Prot | Priv, lldt),
3048 DI(ModRM | Prot | Priv, ltr),
3049 N, N, N, N,
3050};
3051
Avi Kivity73fba5f2010-07-29 15:11:53 +03003052static struct group_dual group7 = { {
Joerg Roedeldee6bb72011-04-04 12:39:30 +02003053 DI(ModRM | Mov | DstMem | Priv, sgdt),
3054 DI(ModRM | Mov | DstMem | Priv, sidt),
Avi Kivity5ef39c72011-04-21 12:21:50 +03003055 II(ModRM | SrcMem | Priv, em_lgdt, lgdt),
3056 II(ModRM | SrcMem | Priv, em_lidt, lidt),
3057 II(SrcNone | ModRM | DstMem | Mov, em_smsw, smsw), N,
3058 II(SrcMem16 | ModRM | Mov | Priv, em_lmsw, lmsw),
3059 II(SrcMem | ModRM | ByteOp | Priv | NoAccess, em_invlpg, invlpg),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003060}, {
Avi Kivity5ef39c72011-04-21 12:21:50 +03003061 I(SrcNone | ModRM | Priv | VendorSpecific, em_vmcall),
3062 EXT(0, group7_rm1),
Joerg Roedel01de8b02011-04-04 12:39:31 +02003063 N, EXT(0, group7_rm3),
Avi Kivity5ef39c72011-04-21 12:21:50 +03003064 II(SrcNone | ModRM | DstMem | Mov, em_smsw, smsw), N,
3065 II(SrcMem16 | ModRM | Mov | Priv, em_lmsw, lmsw), EXT(0, group7_rm7),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003066} };
3067
/* Group 8 (0f ba): BT/BTS/BTR/BTC with an immediate bit index (reg=4..7). */
static struct opcode group8[] = {
	N, N, N, N,
	D(DstMem | SrcImmByte | ModRM), D(DstMem | SrcImmByte | ModRM | Lock),
	D(DstMem | SrcImmByte | ModRM | Lock), D(DstMem | SrcImmByte | ModRM | Lock),
};
3073
/* Group 9 (0f c7): CMPXCHG8B/16B at reg=1 (memory form only). */
static struct group_dual group9 = { {
	N, D(DstMem64 | ModRM | Lock), N, N, N, N, N, N,
}, {
	N, N, N, N, N, N, N, N,
} };
3079
Avi Kivitya4d4a7c2010-08-03 15:05:46 +03003080static struct opcode group11[] = {
3081 I(DstMem | SrcImm | ModRM | Mov, em_mov), X7(D(Undefined)),
3082};
3083
Avi Kivityaa97bb42010-01-20 18:09:23 +02003084static struct gprefix pfx_0f_6f_0f_7f = {
3085 N, N, N, I(Sse, em_movdqu),
3086};
3087
Avi Kivity73fba5f2010-07-29 15:11:53 +03003088static struct opcode opcode_table[256] = {
3089 /* 0x00 - 0x07 */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003090 I6ALU(Lock, em_add),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003091 D(ImplicitOps | Stack | No64), D(ImplicitOps | Stack | No64),
3092 /* 0x08 - 0x0F */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003093 I6ALU(Lock, em_or),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003094 D(ImplicitOps | Stack | No64), N,
3095 /* 0x10 - 0x17 */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003096 I6ALU(Lock, em_adc),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003097 D(ImplicitOps | Stack | No64), D(ImplicitOps | Stack | No64),
3098 /* 0x18 - 0x1F */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003099 I6ALU(Lock, em_sbb),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003100 D(ImplicitOps | Stack | No64), D(ImplicitOps | Stack | No64),
3101 /* 0x20 - 0x27 */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003102 I6ALU(Lock, em_and), N, N,
Avi Kivity73fba5f2010-07-29 15:11:53 +03003103 /* 0x28 - 0x2F */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003104 I6ALU(Lock, em_sub), N, I(ByteOp | DstAcc | No64, em_das),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003105 /* 0x30 - 0x37 */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003106 I6ALU(Lock, em_xor), N, N,
Avi Kivity73fba5f2010-07-29 15:11:53 +03003107 /* 0x38 - 0x3F */
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003108 I6ALU(0, em_cmp), N, N,
Avi Kivity73fba5f2010-07-29 15:11:53 +03003109 /* 0x40 - 0x4F */
3110 X16(D(DstReg)),
3111 /* 0x50 - 0x57 */
Avi Kivity63540382010-07-29 15:11:55 +03003112 X8(I(SrcReg | Stack, em_push)),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003113 /* 0x58 - 0x5F */
Takuya Yoshikawac54fe502011-04-23 18:49:40 +09003114 X8(I(DstReg | Stack, em_pop)),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003115 /* 0x60 - 0x67 */
Takuya Yoshikawab96a7fa2011-04-23 18:51:07 +09003116 I(ImplicitOps | Stack | No64, em_pusha),
3117 I(ImplicitOps | Stack | No64, em_popa),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003118 N, D(DstReg | SrcMem32 | ModRM | Mov) /* movsxd (x86/64) */ ,
3119 N, N, N, N,
3120 /* 0x68 - 0x6F */
Avi Kivityd46164d2010-08-18 19:29:33 +03003121 I(SrcImm | Mov | Stack, em_push),
3122 I(DstReg | SrcMem | ModRM | Src2Imm, em_imul_3op),
Avi Kivityf3a1b9f2010-08-18 18:25:25 +03003123 I(SrcImmByte | Mov | Stack, em_push),
3124 I(DstReg | SrcMem | ModRM | Src2ImmByte, em_imul_3op),
Marcelo Tosatti221192b2011-05-30 15:23:14 -03003125 D2bvIP(DstDI | SrcDX | Mov | String, ins, check_perm_in), /* insb, insw/insd */
3126 D2bvIP(SrcSI | DstDX | String, outs, check_perm_out), /* outsb, outsw/outsd */
Avi Kivity73fba5f2010-07-29 15:11:53 +03003127 /* 0x70 - 0x7F */
3128 X16(D(SrcImmByte)),
3129 /* 0x80 - 0x87 */
3130 G(ByteOp | DstMem | SrcImm | ModRM | Group, group1),
3131 G(DstMem | SrcImm | ModRM | Group, group1),
3132 G(ByteOp | DstMem | SrcImm | ModRM | No64 | Group, group1),
3133 G(DstMem | SrcImmByte | ModRM | Group, group1),
Takuya Yoshikawa9f21ca52011-05-29 21:57:53 +09003134 I2bv(DstMem | SrcReg | ModRM, em_test),
Takuya Yoshikawae4f973a2011-05-29 21:59:09 +09003135 I2bv(DstMem | SrcReg | ModRM | Lock, em_xchg),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003136 /* 0x88 - 0x8F */
Avi Kivityb9eac5f2010-08-03 14:46:56 +03003137 I2bv(DstMem | SrcReg | ModRM | Mov, em_mov),
3138 I2bv(DstReg | SrcMem | ModRM | Mov, em_mov),
Takuya Yoshikawa1bd5f462011-05-29 22:01:33 +09003139 I(DstMem | SrcNone | ModRM | Mov, em_mov_rm_sreg),
3140 D(ModRM | SrcMem | NoAccess | DstReg),
3141 I(ImplicitOps | SrcMem16 | ModRM, em_mov_sreg_rm),
3142 G(0, group1A),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003143 /* 0x90 - 0x97 */
Joerg Roedelbf608f82011-04-04 12:39:34 +02003144 DI(SrcAcc | DstReg, pause), X7(D(SrcAcc | DstReg)),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003145 /* 0x98 - 0x9F */
Avi Kivity61429142010-08-19 15:13:00 +03003146 D(DstAcc | SrcNone), I(ImplicitOps | SrcAcc, em_cwd),
Wei Yongjuncc4feed2010-08-25 14:10:53 +08003147 I(SrcImmFAddr | No64, em_call_far), N,
Takuya Yoshikawa62aaa2f2011-04-23 18:52:56 +09003148 II(ImplicitOps | Stack, em_pushf, pushf),
3149 II(ImplicitOps | Stack, em_popf, popf), N, N,
Avi Kivity73fba5f2010-07-29 15:11:53 +03003150 /* 0xA0 - 0xA7 */
Avi Kivityb9eac5f2010-08-03 14:46:56 +03003151 I2bv(DstAcc | SrcMem | Mov | MemAbs, em_mov),
3152 I2bv(DstMem | SrcAcc | Mov | MemAbs, em_mov),
3153 I2bv(SrcSI | DstDI | Mov | String, em_mov),
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003154 I2bv(SrcSI | DstDI | String, em_cmp),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003155 /* 0xA8 - 0xAF */
Takuya Yoshikawa9f21ca52011-05-29 21:57:53 +09003156 I2bv(DstAcc | SrcImm, em_test),
Avi Kivityb9eac5f2010-08-03 14:46:56 +03003157 I2bv(SrcAcc | DstDI | Mov | String, em_mov),
3158 I2bv(SrcSI | DstAcc | Mov | String, em_mov),
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003159 I2bv(SrcAcc | DstDI | String, em_cmp),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003160 /* 0xB0 - 0xB7 */
Avi Kivityb9eac5f2010-08-03 14:46:56 +03003161 X8(I(ByteOp | DstReg | SrcImm | Mov, em_mov)),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003162 /* 0xB8 - 0xBF */
Avi Kivityb9eac5f2010-08-03 14:46:56 +03003163 X8(I(DstReg | SrcImm | Mov, em_mov)),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003164 /* 0xC0 - 0xC7 */
Avi Kivityd2c6c7a2010-08-26 11:56:11 +03003165 D2bv(DstMem | SrcImmByte | ModRM),
Avi Kivity40ece7c2010-08-18 15:12:09 +03003166 I(ImplicitOps | Stack | SrcImmU16, em_ret_near_imm),
Takuya Yoshikawaebda02c2011-05-29 22:00:22 +09003167 I(ImplicitOps | Stack, em_ret),
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08003168 D(DstReg | SrcMemFAddr | ModRM | No64), D(DstReg | SrcMemFAddr | ModRM | No64),
Avi Kivitya4d4a7c2010-08-03 15:05:46 +03003169 G(ByteOp, group11), G(0, group11),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003170 /* 0xC8 - 0xCF */
Takuya Yoshikawadb5b0762011-05-29 21:56:26 +09003171 N, N, N, I(ImplicitOps | Stack, em_ret_far),
Avi Kivity3c6e2762011-04-04 12:39:23 +02003172 D(ImplicitOps), DI(SrcImmByte, intn),
Takuya Yoshikawadb5b0762011-05-29 21:56:26 +09003173 D(ImplicitOps | No64), II(ImplicitOps, em_iret, iret),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003174 /* 0xD0 - 0xD7 */
Avi Kivityd2c6c7a2010-08-26 11:56:11 +03003175 D2bv(DstMem | SrcOne | ModRM), D2bv(DstMem | ModRM),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003176 N, N, N, N,
3177 /* 0xD8 - 0xDF */
3178 N, N, N, N, N, N, N, N,
3179 /* 0xE0 - 0xE7 */
Takuya Yoshikawad06e03a2011-05-29 22:04:08 +09003180 X3(I(SrcImmByte, em_loop)),
3181 I(SrcImmByte, em_jcxz),
Joerg Roedelf6511932011-04-04 12:39:35 +02003182 D2bvIP(SrcImmUByte | DstAcc, in, check_perm_in),
3183 D2bvIP(SrcAcc | DstImmUByte, out, check_perm_out),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003184 /* 0xE8 - 0xEF */
3185 D(SrcImm | Stack), D(SrcImm | ImplicitOps),
Takuya Yoshikawadb5b0762011-05-29 21:56:26 +09003186 I(SrcImmFAddr | No64, em_jmp_far), D(SrcImmByte | ImplicitOps),
Marcelo Tosatti221192b2011-05-30 15:23:14 -03003187 D2bvIP(SrcDX | DstAcc, in, check_perm_in),
3188 D2bvIP(SrcAcc | DstDX, out, check_perm_out),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003189 /* 0xF0 - 0xF7 */
Joerg Roedelbf608f82011-04-04 12:39:34 +02003190 N, DI(ImplicitOps, icebp), N, N,
Avi Kivity3c6e2762011-04-04 12:39:23 +02003191 DI(ImplicitOps | Priv, hlt), D(ImplicitOps),
3192 G(ByteOp, group3), G(0, group3),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003193 /* 0xF8 - 0xFF */
Takuya Yoshikawaf411e6c2011-05-29 22:05:15 +09003194 D(ImplicitOps), D(ImplicitOps),
3195 I(ImplicitOps, em_cli), I(ImplicitOps, em_sti),
Avi Kivity73fba5f2010-07-29 15:11:53 +03003196 D(ImplicitOps), D(ImplicitOps), G(0, group4), G(0, group5),
3197};
3198
/*
 * Two-byte (0f-prefixed) opcode decode table, indexed by the second
 * opcode byte.
 */
static struct opcode twobyte_table[256] = {
	/* 0x00 - 0x0F */
	G(0, group6), GD(0, &group7), N, N,
	N, I(ImplicitOps | VendorSpecific, em_syscall),
	II(ImplicitOps | Priv, em_clts, clts), N,
	DI(ImplicitOps | Priv, invd), DI(ImplicitOps | Priv, wbinvd), N, N,
	N, D(ImplicitOps | ModRM), N, N,
	/* 0x10 - 0x1F */
	N, N, N, N, N, N, N, N, D(ImplicitOps | ModRM), N, N, N, N, N, N, N,
	/* 0x20 - 0x2F */
	DIP(ModRM | DstMem | Priv | Op3264, cr_read, check_cr_read),
	DIP(ModRM | DstMem | Priv | Op3264, dr_read, check_dr_read),
	DIP(ModRM | SrcMem | Priv | Op3264, cr_write, check_cr_write),
	DIP(ModRM | SrcMem | Priv | Op3264, dr_write, check_dr_write),
	N, N, N, N,
	N, N, N, N, N, N, N, N,
	/* 0x30 - 0x3F */
	DI(ImplicitOps | Priv, wrmsr),
	IIP(ImplicitOps, em_rdtsc, rdtsc, check_rdtsc),
	DI(ImplicitOps | Priv, rdmsr),
	DIP(ImplicitOps | Priv, rdpmc, check_rdpmc),
	I(ImplicitOps | VendorSpecific, em_sysenter),
	I(ImplicitOps | Priv | VendorSpecific, em_sysexit),
	N, N,
	N, N, N, N, N, N, N, N,
	/* 0x40 - 0x4F */
	X16(D(DstReg | SrcMem | ModRM | Mov)),
	/* 0x50 - 0x5F */
	N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
	/* 0x60 - 0x6F */
	N, N, N, N,
	N, N, N, N,
	N, N, N, N,
	N, N, N, GP(SrcMem | DstReg | ModRM | Mov, &pfx_0f_6f_0f_7f),
	/* 0x70 - 0x7F */
	N, N, N, N,
	N, N, N, N,
	N, N, N, N,
	N, N, N, GP(SrcReg | DstMem | ModRM | Mov, &pfx_0f_6f_0f_7f),
	/* 0x80 - 0x8F */
	X16(D(SrcImm)),
	/* 0x90 - 0x9F */
	X16(D(ByteOp | DstMem | SrcNone | ModRM| Mov)),
	/* 0xA0 - 0xA7 */
	D(ImplicitOps | Stack), D(ImplicitOps | Stack),
	DI(ImplicitOps, cpuid), D(DstMem | SrcReg | ModRM | BitOp),
	D(DstMem | SrcReg | Src2ImmByte | ModRM),
	D(DstMem | SrcReg | Src2CL | ModRM), N, N,
	/* 0xA8 - 0xAF */
	D(ImplicitOps | Stack), D(ImplicitOps | Stack),
	DI(ImplicitOps, rsm), D(DstMem | SrcReg | ModRM | BitOp | Lock),
	D(DstMem | SrcReg | Src2ImmByte | ModRM),
	D(DstMem | SrcReg | Src2CL | ModRM),
	D(ModRM), I(DstReg | SrcMem | ModRM, em_imul),
	/* 0xB0 - 0xB7 */
	D2bv(DstMem | SrcReg | ModRM | Lock),
	D(DstReg | SrcMemFAddr | ModRM), D(DstMem | SrcReg | ModRM | BitOp | Lock),
	D(DstReg | SrcMemFAddr | ModRM), D(DstReg | SrcMemFAddr | ModRM),
	D(ByteOp | DstReg | SrcMem | ModRM | Mov), D(DstReg | SrcMem16 | ModRM | Mov),
	/* 0xB8 - 0xBF */
	N, N,
	G(BitOp, group8), D(DstMem | SrcReg | ModRM | BitOp | Lock),
	D(DstReg | SrcMem | ModRM), D(DstReg | SrcMem | ModRM),
	D(ByteOp | DstReg | SrcMem | ModRM | Mov), D(DstReg | SrcMem16 | ModRM | Mov),
	/* 0xC0 - 0xCF */
	D2bv(DstMem | SrcReg | ModRM | Lock),
	N, D(DstMem | SrcReg | ModRM | Mov),
	N, N, N, GD(0, &group9),
	N, N, N, N, N, N, N, N,
	/* 0xD0 - 0xDF */
	N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
	/* 0xE0 - 0xEF */
	N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
	/* 0xF0 - 0xFF */
	N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N
};
3275
/* The table-building shorthands are no longer needed past this point. */
#undef D
#undef N
#undef G
#undef GD
#undef I
#undef GP
#undef EXT

#undef D2bv
#undef D2bvIP
#undef I2bv
#undef I6ALU
Takuya Yoshikawad67fc272011-04-23 18:48:02 +09003287#undef I6ALU
Avi Kivity8d8f4e92010-08-26 11:56:06 +03003288
Avi Kivity9dac77f2011-06-01 15:34:25 +03003289static unsigned imm_size(struct x86_emulate_ctxt *ctxt)
Avi Kivity39f21ee2010-08-18 19:20:21 +03003290{
3291 unsigned size;
3292
Avi Kivity9dac77f2011-06-01 15:34:25 +03003293 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
Avi Kivity39f21ee2010-08-18 19:20:21 +03003294 if (size == 8)
3295 size = 4;
3296 return size;
3297}
3298
/*
 * Fetch an immediate operand of @size bytes (1, 2 or 4) from the
 * instruction stream at the current _eip into @op.  When
 * @sign_extension is false, the value is masked back down to its
 * natural width after the (sign-extending) fetch.  On a fetch failure
 * the insn_fetch() macro jumps to the done label with rc carrying the
 * error.
 */
static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op,
		      unsigned size, bool sign_extension)
{
	int rc = X86EMUL_CONTINUE;

	op->type = OP_IMM;
	op->bytes = size;
	op->addr.mem.ea = ctxt->_eip;
	/* NB. Immediates are sign-extended as necessary. */
	switch (op->bytes) {
	case 1:
		op->val = insn_fetch(s8, ctxt);
		break;
	case 2:
		op->val = insn_fetch(s16, ctxt);
		break;
	case 4:
		op->val = insn_fetch(s32, ctxt);
		break;
	}
	if (!sign_extension) {
		switch (op->bytes) {
		case 1:
			op->val &= 0xff;
			break;
		case 2:
			op->val &= 0xffff;
			break;
		case 4:
			op->val &= 0xffffffff;
			break;
		}
	}
done:
	return rc;
}
3335
/*
 * Decode a single operand, selected by the operand-type value @d (one
 * of the Op* constants), into @op.
 *
 * OpMem/OpMem64 copy the memory operand already computed during
 * ModRM/absolute decode (ctxt->memop) and point ctxt->memopp at @op so
 * the caller can apply later fixups to it (e.g. RIP-relative ea).
 *
 * Returns X86EMUL_CONTINUE on success; rc is only changed if an
 * insn_fetch() inside a case fails and bails out to the "done" label.
 */
static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op,
			  unsigned d)
{
	int rc = X86EMUL_CONTINUE;

	switch (d) {
	case OpReg:
		/* Extra flag is set only for two-byte opcodes 0xb6/0xb7. */
		decode_register_operand(ctxt, op,
			 ctxt->twobyte && (ctxt->b == 0xb6 || ctxt->b == 0xb7));
		break;
	case OpImmUByte:
		/* One-byte unsigned immediate from the instruction stream. */
		op->type = OP_IMM;
		op->addr.mem.ea = ctxt->_eip;
		op->bytes = 1;
		op->val = insn_fetch(u8, ctxt);
		break;
	case OpMem:
	case OpMem64:
		*op = ctxt->memop;
		ctxt->memopp = op;	/* remember for later fixups */
		if (d == OpMem64)
			op->bytes = 8;
		else
			op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
		if (ctxt->d & BitOp)
			fetch_bit_operand(ctxt);
		op->orig_val = op->val;
		break;
	case OpAcc:
		/* Accumulator: AL/AX/EAX/RAX. */
		op->type = OP_REG;
		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
		op->addr.reg = &ctxt->regs[VCPU_REGS_RAX];
		fetch_register_operand(op);
		op->orig_val = op->val;
		break;
	case OpDI:
		/* Memory operand at ES:(E/R)DI. */
		op->type = OP_MEM;
		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
		op->addr.mem.ea =
			register_address(ctxt, ctxt->regs[VCPU_REGS_RDI]);
		op->addr.mem.seg = VCPU_SREG_ES;
		op->val = 0;
		break;
	case OpDX:
		/* The 16-bit DX register. */
		op->type = OP_REG;
		op->bytes = 2;
		op->addr.reg = &ctxt->regs[VCPU_REGS_RDX];
		fetch_register_operand(op);
		break;
	case OpImplicit:
		/* Special instructions do their own operand decoding. */
	default:
		op->type = OP_NONE; /* Disable writeback. */
		break;
	}

done:
	return rc;
}
3395
Takuya Yoshikawaef5d75c2011-05-15 00:57:43 +09003396int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003397{
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003398 int rc = X86EMUL_CONTINUE;
3399 int mode = ctxt->mode;
Avi Kivity46561642011-04-24 14:09:59 +03003400 int def_op_bytes, def_ad_bytes, goffset, simd_prefix;
Avi Kivity0d7cdee2011-03-29 11:34:38 +02003401 bool op_prefix = false;
Avi Kivity46561642011-04-24 14:09:59 +03003402 struct opcode opcode;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003403
Avi Kivityf09ed832011-09-13 10:45:40 +03003404 ctxt->memop.type = OP_NONE;
3405 ctxt->memopp = NULL;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003406 ctxt->_eip = ctxt->eip;
3407 ctxt->fetch.start = ctxt->_eip;
3408 ctxt->fetch.end = ctxt->fetch.start + insn_len;
Andre Przywaradc25e892010-12-21 11:12:07 +01003409 if (insn_len > 0)
Avi Kivity9dac77f2011-06-01 15:34:25 +03003410 memcpy(ctxt->fetch.data, insn, insn_len);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003411
3412 switch (mode) {
3413 case X86EMUL_MODE_REAL:
3414 case X86EMUL_MODE_VM86:
3415 case X86EMUL_MODE_PROT16:
3416 def_op_bytes = def_ad_bytes = 2;
3417 break;
3418 case X86EMUL_MODE_PROT32:
3419 def_op_bytes = def_ad_bytes = 4;
3420 break;
3421#ifdef CONFIG_X86_64
3422 case X86EMUL_MODE_PROT64:
3423 def_op_bytes = 4;
3424 def_ad_bytes = 8;
3425 break;
3426#endif
3427 default:
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003428 return EMULATION_FAILED;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003429 }
3430
Avi Kivity9dac77f2011-06-01 15:34:25 +03003431 ctxt->op_bytes = def_op_bytes;
3432 ctxt->ad_bytes = def_ad_bytes;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003433
3434 /* Legacy prefixes. */
3435 for (;;) {
Takuya Yoshikawae85a1082011-07-30 18:01:26 +09003436 switch (ctxt->b = insn_fetch(u8, ctxt)) {
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003437 case 0x66: /* operand-size override */
Avi Kivity0d7cdee2011-03-29 11:34:38 +02003438 op_prefix = true;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003439 /* switch between 2/4 bytes */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003440 ctxt->op_bytes = def_op_bytes ^ 6;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003441 break;
3442 case 0x67: /* address-size override */
3443 if (mode == X86EMUL_MODE_PROT64)
3444 /* switch between 4/8 bytes */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003445 ctxt->ad_bytes = def_ad_bytes ^ 12;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003446 else
3447 /* switch between 2/4 bytes */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003448 ctxt->ad_bytes = def_ad_bytes ^ 6;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003449 break;
3450 case 0x26: /* ES override */
3451 case 0x2e: /* CS override */
3452 case 0x36: /* SS override */
3453 case 0x3e: /* DS override */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003454 set_seg_override(ctxt, (ctxt->b >> 3) & 3);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003455 break;
3456 case 0x64: /* FS override */
3457 case 0x65: /* GS override */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003458 set_seg_override(ctxt, ctxt->b & 7);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003459 break;
3460 case 0x40 ... 0x4f: /* REX */
3461 if (mode != X86EMUL_MODE_PROT64)
3462 goto done_prefixes;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003463 ctxt->rex_prefix = ctxt->b;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003464 continue;
3465 case 0xf0: /* LOCK */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003466 ctxt->lock_prefix = 1;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003467 break;
3468 case 0xf2: /* REPNE/REPNZ */
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003469 case 0xf3: /* REP/REPE/REPZ */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003470 ctxt->rep_prefix = ctxt->b;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003471 break;
3472 default:
3473 goto done_prefixes;
3474 }
3475
3476 /* Any legacy prefix after a REX prefix nullifies its effect. */
3477
Avi Kivity9dac77f2011-06-01 15:34:25 +03003478 ctxt->rex_prefix = 0;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003479 }
3480
3481done_prefixes:
3482
3483 /* REX prefix. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003484 if (ctxt->rex_prefix & 8)
3485 ctxt->op_bytes = 8; /* REX.W */
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003486
3487 /* Opcode byte(s). */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003488 opcode = opcode_table[ctxt->b];
Wei Yongjund3ad6242010-08-05 16:34:39 +08003489 /* Two-byte opcode? */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003490 if (ctxt->b == 0x0f) {
3491 ctxt->twobyte = 1;
Takuya Yoshikawae85a1082011-07-30 18:01:26 +09003492 ctxt->b = insn_fetch(u8, ctxt);
Avi Kivity9dac77f2011-06-01 15:34:25 +03003493 opcode = twobyte_table[ctxt->b];
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003494 }
Avi Kivity9dac77f2011-06-01 15:34:25 +03003495 ctxt->d = opcode.flags;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003496
Avi Kivity9dac77f2011-06-01 15:34:25 +03003497 while (ctxt->d & GroupMask) {
3498 switch (ctxt->d & GroupMask) {
Avi Kivity46561642011-04-24 14:09:59 +03003499 case Group:
Takuya Yoshikawae85a1082011-07-30 18:01:26 +09003500 ctxt->modrm = insn_fetch(u8, ctxt);
Avi Kivity9dac77f2011-06-01 15:34:25 +03003501 --ctxt->_eip;
3502 goffset = (ctxt->modrm >> 3) & 7;
Avi Kivity46561642011-04-24 14:09:59 +03003503 opcode = opcode.u.group[goffset];
3504 break;
3505 case GroupDual:
Takuya Yoshikawae85a1082011-07-30 18:01:26 +09003506 ctxt->modrm = insn_fetch(u8, ctxt);
Avi Kivity9dac77f2011-06-01 15:34:25 +03003507 --ctxt->_eip;
3508 goffset = (ctxt->modrm >> 3) & 7;
3509 if ((ctxt->modrm >> 6) == 3)
Avi Kivity46561642011-04-24 14:09:59 +03003510 opcode = opcode.u.gdual->mod3[goffset];
3511 else
3512 opcode = opcode.u.gdual->mod012[goffset];
3513 break;
3514 case RMExt:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003515 goffset = ctxt->modrm & 7;
Joerg Roedel01de8b02011-04-04 12:39:31 +02003516 opcode = opcode.u.group[goffset];
Avi Kivity46561642011-04-24 14:09:59 +03003517 break;
3518 case Prefix:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003519 if (ctxt->rep_prefix && op_prefix)
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003520 return EMULATION_FAILED;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003521 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix;
Avi Kivity46561642011-04-24 14:09:59 +03003522 switch (simd_prefix) {
3523 case 0x00: opcode = opcode.u.gprefix->pfx_no; break;
3524 case 0x66: opcode = opcode.u.gprefix->pfx_66; break;
3525 case 0xf2: opcode = opcode.u.gprefix->pfx_f2; break;
3526 case 0xf3: opcode = opcode.u.gprefix->pfx_f3; break;
3527 }
3528 break;
3529 default:
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003530 return EMULATION_FAILED;
Avi Kivity0d7cdee2011-03-29 11:34:38 +02003531 }
Avi Kivity46561642011-04-24 14:09:59 +03003532
Avi Kivityb1ea50b2011-09-13 10:45:42 +03003533 ctxt->d &= ~(u64)GroupMask;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003534 ctxt->d |= opcode.flags;
Avi Kivity0d7cdee2011-03-29 11:34:38 +02003535 }
3536
Avi Kivity9dac77f2011-06-01 15:34:25 +03003537 ctxt->execute = opcode.u.execute;
3538 ctxt->check_perm = opcode.check_perm;
3539 ctxt->intercept = opcode.intercept;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003540
3541 /* Unrecognised? */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003542 if (ctxt->d == 0 || (ctxt->d & Undefined))
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003543 return EMULATION_FAILED;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003544
Avi Kivity9dac77f2011-06-01 15:34:25 +03003545 if (!(ctxt->d & VendorSpecific) && ctxt->only_vendor_specific_insn)
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003546 return EMULATION_FAILED;
Avi Kivityd8671622011-02-01 16:32:03 +02003547
Avi Kivity9dac77f2011-06-01 15:34:25 +03003548 if (mode == X86EMUL_MODE_PROT64 && (ctxt->d & Stack))
3549 ctxt->op_bytes = 8;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003550
Avi Kivity9dac77f2011-06-01 15:34:25 +03003551 if (ctxt->d & Op3264) {
Avi Kivity7f9b4b72010-08-01 14:46:54 +03003552 if (mode == X86EMUL_MODE_PROT64)
Avi Kivity9dac77f2011-06-01 15:34:25 +03003553 ctxt->op_bytes = 8;
Avi Kivity7f9b4b72010-08-01 14:46:54 +03003554 else
Avi Kivity9dac77f2011-06-01 15:34:25 +03003555 ctxt->op_bytes = 4;
Avi Kivity7f9b4b72010-08-01 14:46:54 +03003556 }
3557
Avi Kivity9dac77f2011-06-01 15:34:25 +03003558 if (ctxt->d & Sse)
3559 ctxt->op_bytes = 16;
Avi Kivity12537912011-03-29 11:41:27 +02003560
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003561 /* ModRM and SIB bytes. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003562 if (ctxt->d & ModRM) {
Avi Kivityf09ed832011-09-13 10:45:40 +03003563 rc = decode_modrm(ctxt, &ctxt->memop);
Avi Kivity9dac77f2011-06-01 15:34:25 +03003564 if (!ctxt->has_seg_override)
3565 set_seg_override(ctxt, ctxt->modrm_seg);
3566 } else if (ctxt->d & MemAbs)
Avi Kivityf09ed832011-09-13 10:45:40 +03003567 rc = decode_abs(ctxt, &ctxt->memop);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003568 if (rc != X86EMUL_CONTINUE)
3569 goto done;
3570
Avi Kivity9dac77f2011-06-01 15:34:25 +03003571 if (!ctxt->has_seg_override)
3572 set_seg_override(ctxt, VCPU_SREG_DS);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003573
Avi Kivityf09ed832011-09-13 10:45:40 +03003574 ctxt->memop.addr.mem.seg = seg_override(ctxt);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003575
Avi Kivityf09ed832011-09-13 10:45:40 +03003576 if (ctxt->memop.type == OP_MEM && ctxt->ad_bytes != 8)
3577 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003578
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003579 /*
3580 * Decode and fetch the source operand: register, memory
3581 * or immediate.
3582 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003583 switch (ctxt->d & SrcMask) {
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003584 case SrcNone:
3585 break;
3586 case SrcReg:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003587 decode_register_operand(ctxt, &ctxt->src, 0);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003588 break;
3589 case SrcMem16:
Avi Kivityf09ed832011-09-13 10:45:40 +03003590 ctxt->memop.bytes = 2;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003591 goto srcmem_common;
3592 case SrcMem32:
Avi Kivityf09ed832011-09-13 10:45:40 +03003593 ctxt->memop.bytes = 4;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003594 goto srcmem_common;
3595 case SrcMem:
Avi Kivityf09ed832011-09-13 10:45:40 +03003596 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003597 srcmem_common:
Avi Kivityf09ed832011-09-13 10:45:40 +03003598 ctxt->src = ctxt->memop;
3599 ctxt->memopp = &ctxt->src;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003600 break;
Avi Kivityb250e602010-08-18 15:11:24 +03003601 case SrcImmU16:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003602 rc = decode_imm(ctxt, &ctxt->src, 2, false);
Avi Kivity39f21ee2010-08-18 19:20:21 +03003603 break;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003604 case SrcImm:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003605 rc = decode_imm(ctxt, &ctxt->src, imm_size(ctxt), true);
Avi Kivity39f21ee2010-08-18 19:20:21 +03003606 break;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003607 case SrcImmU:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003608 rc = decode_imm(ctxt, &ctxt->src, imm_size(ctxt), false);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003609 break;
3610 case SrcImmByte:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003611 rc = decode_imm(ctxt, &ctxt->src, 1, true);
Avi Kivity39f21ee2010-08-18 19:20:21 +03003612 break;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003613 case SrcImmUByte:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003614 rc = decode_imm(ctxt, &ctxt->src, 1, false);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003615 break;
3616 case SrcAcc:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003617 ctxt->src.type = OP_REG;
3618 ctxt->src.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
3619 ctxt->src.addr.reg = &ctxt->regs[VCPU_REGS_RAX];
3620 fetch_register_operand(&ctxt->src);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003621 break;
3622 case SrcOne:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003623 ctxt->src.bytes = 1;
3624 ctxt->src.val = 1;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003625 break;
3626 case SrcSI:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003627 ctxt->src.type = OP_MEM;
3628 ctxt->src.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
3629 ctxt->src.addr.mem.ea =
3630 register_address(ctxt, ctxt->regs[VCPU_REGS_RSI]);
3631 ctxt->src.addr.mem.seg = seg_override(ctxt);
3632 ctxt->src.val = 0;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003633 break;
3634 case SrcImmFAddr:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003635 ctxt->src.type = OP_IMM;
3636 ctxt->src.addr.mem.ea = ctxt->_eip;
3637 ctxt->src.bytes = ctxt->op_bytes + 2;
Takuya Yoshikawa807941b2011-07-30 18:00:17 +09003638 insn_fetch_arr(ctxt->src.valptr, ctxt->src.bytes, ctxt);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003639 break;
3640 case SrcMemFAddr:
Avi Kivityf09ed832011-09-13 10:45:40 +03003641 ctxt->memop.bytes = ctxt->op_bytes + 2;
Avi Kivity2dbd0dd2010-08-01 15:40:19 +03003642 goto srcmem_common;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003643 break;
Marcelo Tosatti221192b2011-05-30 15:23:14 -03003644 case SrcDX:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003645 ctxt->src.type = OP_REG;
3646 ctxt->src.bytes = 2;
3647 ctxt->src.addr.reg = &ctxt->regs[VCPU_REGS_RDX];
3648 fetch_register_operand(&ctxt->src);
Marcelo Tosatti221192b2011-05-30 15:23:14 -03003649 break;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003650 }
3651
Avi Kivity39f21ee2010-08-18 19:20:21 +03003652 if (rc != X86EMUL_CONTINUE)
3653 goto done;
3654
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003655 /*
3656 * Decode and fetch the second source operand: register, memory
3657 * or immediate.
3658 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003659 switch (ctxt->d & Src2Mask) {
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003660 case Src2None:
3661 break;
3662 case Src2CL:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003663 ctxt->src2.bytes = 1;
Avi Kivity9be3be12011-09-13 10:45:38 +03003664 ctxt->src2.val = ctxt->regs[VCPU_REGS_RCX] & 0xff;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003665 break;
3666 case Src2ImmByte:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003667 rc = decode_imm(ctxt, &ctxt->src2, 1, true);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003668 break;
3669 case Src2One:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003670 ctxt->src2.bytes = 1;
3671 ctxt->src2.val = 1;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003672 break;
Avi Kivity7db41eb2010-08-18 19:25:28 +03003673 case Src2Imm:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003674 rc = decode_imm(ctxt, &ctxt->src2, imm_size(ctxt), true);
Avi Kivity7db41eb2010-08-18 19:25:28 +03003675 break;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003676 }
3677
Avi Kivity39f21ee2010-08-18 19:20:21 +03003678 if (rc != X86EMUL_CONTINUE)
3679 goto done;
3680
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003681 /* Decode and fetch the destination operand: register or memory. */
Avi Kivitya9945542011-09-13 10:45:41 +03003682 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask);
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003683
3684done:
Avi Kivityf09ed832011-09-13 10:45:40 +03003685 if (ctxt->memopp && ctxt->memopp->type == OP_MEM && ctxt->rip_relative)
3686 ctxt->memopp->addr.mem.ea += ctxt->_eip;
Avi Kivitycb16c342011-06-19 19:21:11 +03003687
Takuya Yoshikawa1d2887e2011-07-30 18:03:34 +09003688 return (rc != X86EMUL_CONTINUE) ? EMULATION_FAILED : EMULATION_OK;
Avi Kivitydde7e6d2010-07-29 15:11:52 +03003689}
3690
Gleb Natapov3e2f65d2010-08-25 12:47:42 +03003691static bool string_insn_completed(struct x86_emulate_ctxt *ctxt)
3692{
Gleb Natapov3e2f65d2010-08-25 12:47:42 +03003693 /* The second termination condition only applies for REPE
3694 * and REPNE. Test if the repeat string operation prefix is
3695 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
3696 * corresponding termination condition according to:
3697 * - if REPE/REPZ and ZF = 0 then done
3698 * - if REPNE/REPNZ and ZF = 1 then done
3699 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003700 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) ||
3701 (ctxt->b == 0xae) || (ctxt->b == 0xaf))
3702 && (((ctxt->rep_prefix == REPE_PREFIX) &&
Gleb Natapov3e2f65d2010-08-25 12:47:42 +03003703 ((ctxt->eflags & EFLG_ZF) == 0))
Avi Kivity9dac77f2011-06-01 15:34:25 +03003704 || ((ctxt->rep_prefix == REPNE_PREFIX) &&
Gleb Natapov3e2f65d2010-08-25 12:47:42 +03003705 ((ctxt->eflags & EFLG_ZF) == EFLG_ZF))))
3706 return true;
3707
3708 return false;
3709}
3710
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003711int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003712{
Avi Kivity9aabc882010-07-29 15:11:50 +03003713 struct x86_emulate_ops *ops = ctxt->ops;
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003714 u64 msr_data;
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09003715 int rc = X86EMUL_CONTINUE;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003716 int saved_dst_type = ctxt->dst.type;
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003717
Avi Kivity9dac77f2011-06-01 15:34:25 +03003718 ctxt->mem_read.pos = 0;
Glauber Costa310b5d32009-05-12 16:21:06 -04003719
Avi Kivity9dac77f2011-06-01 15:34:25 +03003720 if (ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) {
Avi Kivity35d3d4a2010-11-22 17:53:25 +02003721 rc = emulate_ud(ctxt);
Gleb Natapov11616242010-02-11 14:43:14 +02003722 goto done;
3723 }
3724
Gleb Natapovd380a5e2010-02-10 14:21:36 +02003725 /* LOCK prefix is allowed only with some instructions */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003726 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) {
Avi Kivity35d3d4a2010-11-22 17:53:25 +02003727 rc = emulate_ud(ctxt);
Gleb Natapovd380a5e2010-02-10 14:21:36 +02003728 goto done;
3729 }
3730
Avi Kivity9dac77f2011-06-01 15:34:25 +03003731 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) {
Avi Kivity35d3d4a2010-11-22 17:53:25 +02003732 rc = emulate_ud(ctxt);
Avi Kivity081bca02010-08-26 11:06:15 +03003733 goto done;
3734 }
3735
Avi Kivity9dac77f2011-06-01 15:34:25 +03003736 if ((ctxt->d & Sse)
Avi Kivity717746e2011-04-20 13:37:53 +03003737 && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)
3738 || !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
Avi Kivity12537912011-03-29 11:41:27 +02003739 rc = emulate_ud(ctxt);
3740 goto done;
3741 }
3742
Avi Kivity9dac77f2011-06-01 15:34:25 +03003743 if ((ctxt->d & Sse) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
Avi Kivity12537912011-03-29 11:41:27 +02003744 rc = emulate_nm(ctxt);
3745 goto done;
3746 }
3747
Avi Kivity9dac77f2011-06-01 15:34:25 +03003748 if (unlikely(ctxt->guest_mode) && ctxt->intercept) {
3749 rc = emulator_check_intercept(ctxt, ctxt->intercept,
Joerg Roedel8a76d7f2011-04-04 12:39:27 +02003750 X86_ICPT_PRE_EXCEPT);
Avi Kivityc4f035c2011-04-04 12:39:22 +02003751 if (rc != X86EMUL_CONTINUE)
3752 goto done;
3753 }
3754
Gleb Natapove92805a2010-02-10 14:21:35 +02003755 /* Privileged instruction can be executed only in CPL=0 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003756 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
Avi Kivity35d3d4a2010-11-22 17:53:25 +02003757 rc = emulate_gp(ctxt, 0);
Gleb Natapove92805a2010-02-10 14:21:35 +02003758 goto done;
3759 }
3760
Joerg Roedel8ea7d6a2011-04-04 12:39:26 +02003761 /* Instruction can only be executed in protected mode */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003762 if ((ctxt->d & Prot) && !(ctxt->mode & X86EMUL_MODE_PROT)) {
Joerg Roedel8ea7d6a2011-04-04 12:39:26 +02003763 rc = emulate_ud(ctxt);
3764 goto done;
3765 }
3766
Joerg Roedeld09beab2011-04-04 12:39:25 +02003767 /* Do instruction specific permission checks */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003768 if (ctxt->check_perm) {
3769 rc = ctxt->check_perm(ctxt);
Joerg Roedeld09beab2011-04-04 12:39:25 +02003770 if (rc != X86EMUL_CONTINUE)
3771 goto done;
3772 }
3773
Avi Kivity9dac77f2011-06-01 15:34:25 +03003774 if (unlikely(ctxt->guest_mode) && ctxt->intercept) {
3775 rc = emulator_check_intercept(ctxt, ctxt->intercept,
Joerg Roedel8a76d7f2011-04-04 12:39:27 +02003776 X86_ICPT_POST_EXCEPT);
Avi Kivityc4f035c2011-04-04 12:39:22 +02003777 if (rc != X86EMUL_CONTINUE)
3778 goto done;
3779 }
3780
Avi Kivity9dac77f2011-06-01 15:34:25 +03003781 if (ctxt->rep_prefix && (ctxt->d & String)) {
Avi Kivityb9fa9d62007-11-27 19:05:37 +02003782 /* All REP prefixes have the same first termination condition */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003783 if (address_mask(ctxt, ctxt->regs[VCPU_REGS_RCX]) == 0) {
3784 ctxt->eip = ctxt->_eip;
Avi Kivityb9fa9d62007-11-27 19:05:37 +02003785 goto done;
3786 }
Avi Kivityb9fa9d62007-11-27 19:05:37 +02003787 }
3788
Avi Kivity9dac77f2011-06-01 15:34:25 +03003789 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) {
3790 rc = segmented_read(ctxt, ctxt->src.addr.mem,
3791 ctxt->src.valptr, ctxt->src.bytes);
Takuya Yoshikawab60d5132010-01-20 16:47:21 +09003792 if (rc != X86EMUL_CONTINUE)
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003793 goto done;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003794 ctxt->src.orig_val64 = ctxt->src.val64;
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003795 }
3796
Avi Kivity9dac77f2011-06-01 15:34:25 +03003797 if (ctxt->src2.type == OP_MEM) {
3798 rc = segmented_read(ctxt, ctxt->src2.addr.mem,
3799 &ctxt->src2.val, ctxt->src2.bytes);
Gleb Natapove35b7b92010-02-25 16:36:42 +02003800 if (rc != X86EMUL_CONTINUE)
3801 goto done;
3802 }
3803
Avi Kivity9dac77f2011-06-01 15:34:25 +03003804 if ((ctxt->d & DstMask) == ImplicitOps)
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003805 goto special_insn;
3806
3807
Avi Kivity9dac77f2011-06-01 15:34:25 +03003808 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) {
Gleb Natapov69f55cb2010-03-18 15:20:20 +02003809 /* optimisation - avoid slow emulated read if Mov */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003810 rc = segmented_read(ctxt, ctxt->dst.addr.mem,
3811 &ctxt->dst.val, ctxt->dst.bytes);
Gleb Natapov69f55cb2010-03-18 15:20:20 +02003812 if (rc != X86EMUL_CONTINUE)
3813 goto done;
Avi Kivity038e51d2007-01-22 20:40:40 -08003814 }
Avi Kivity9dac77f2011-06-01 15:34:25 +03003815 ctxt->dst.orig_val = ctxt->dst.val;
Avi Kivity038e51d2007-01-22 20:40:40 -08003816
Avi Kivity018a98d2007-11-27 19:30:56 +02003817special_insn:
3818
Avi Kivity9dac77f2011-06-01 15:34:25 +03003819 if (unlikely(ctxt->guest_mode) && ctxt->intercept) {
3820 rc = emulator_check_intercept(ctxt, ctxt->intercept,
Joerg Roedel8a76d7f2011-04-04 12:39:27 +02003821 X86_ICPT_POST_MEMACCESS);
Avi Kivityc4f035c2011-04-04 12:39:22 +02003822 if (rc != X86EMUL_CONTINUE)
3823 goto done;
3824 }
3825
Avi Kivity9dac77f2011-06-01 15:34:25 +03003826 if (ctxt->execute) {
3827 rc = ctxt->execute(ctxt);
Avi Kivityef65c882010-07-29 15:11:51 +03003828 if (rc != X86EMUL_CONTINUE)
3829 goto done;
3830 goto writeback;
3831 }
3832
Avi Kivity9dac77f2011-06-01 15:34:25 +03003833 if (ctxt->twobyte)
Avi Kivity6aa8b732006-12-10 02:21:36 -08003834 goto twobyte_insn;
3835
Avi Kivity9dac77f2011-06-01 15:34:25 +03003836 switch (ctxt->b) {
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003837 case 0x06: /* push es */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003838 rc = emulate_push_sreg(ctxt, VCPU_SREG_ES);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003839 break;
3840 case 0x07: /* pop es */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003841 rc = emulate_pop_sreg(ctxt, VCPU_SREG_ES);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003842 break;
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003843 case 0x0e: /* push cs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003844 rc = emulate_push_sreg(ctxt, VCPU_SREG_CS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003845 break;
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003846 case 0x16: /* push ss */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003847 rc = emulate_push_sreg(ctxt, VCPU_SREG_SS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003848 break;
3849 case 0x17: /* pop ss */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003850 rc = emulate_pop_sreg(ctxt, VCPU_SREG_SS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003851 break;
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003852 case 0x1e: /* push ds */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003853 rc = emulate_push_sreg(ctxt, VCPU_SREG_DS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003854 break;
3855 case 0x1f: /* pop ds */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003856 rc = emulate_pop_sreg(ctxt, VCPU_SREG_DS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03003857 break;
Avi Kivity33615aa2007-10-31 11:15:56 +02003858 case 0x40 ... 0x47: /* inc r16/r32 */
Avi Kivityd1eef452011-09-07 16:41:38 +03003859 emulate_1op(ctxt, "inc");
Avi Kivity33615aa2007-10-31 11:15:56 +02003860 break;
3861 case 0x48 ... 0x4f: /* dec r16/r32 */
Avi Kivityd1eef452011-09-07 16:41:38 +03003862 emulate_1op(ctxt, "dec");
Avi Kivity33615aa2007-10-31 11:15:56 +02003863 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08003864 case 0x63: /* movsxd */
Laurent Vivier8b4caf62007-09-18 11:27:19 +02003865 if (ctxt->mode != X86EMUL_MODE_PROT64)
Avi Kivity6aa8b732006-12-10 02:21:36 -08003866 goto cannot_emulate;
Avi Kivity9dac77f2011-06-01 15:34:25 +03003867 ctxt->dst.val = (s32) ctxt->src.val;
Avi Kivity6aa8b732006-12-10 02:21:36 -08003868 break;
Avi Kivity018a98d2007-11-27 19:30:56 +02003869 case 0x6c: /* insb */
3870 case 0x6d: /* insw/insd */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003871 ctxt->src.val = ctxt->regs[VCPU_REGS_RDX];
Wei Yongjuna13a63f2010-08-06 11:46:12 +08003872 goto do_io_in;
Avi Kivity018a98d2007-11-27 19:30:56 +02003873 case 0x6e: /* outsb */
3874 case 0x6f: /* outsw/outsd */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003875 ctxt->dst.val = ctxt->regs[VCPU_REGS_RDX];
Wei Yongjuna13a63f2010-08-06 11:46:12 +08003876 goto do_io_out;
Gleb Natapov79729952010-03-18 15:20:24 +02003877 break;
Gleb Natapovb2833e32009-04-12 13:36:30 +03003878 case 0x70 ... 0x7f: /* jcc (short) */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003879 if (test_cc(ctxt->b, ctxt->eflags))
3880 jmp_rel(ctxt, ctxt->src.val);
Avi Kivity018a98d2007-11-27 19:30:56 +02003881 break;
Nitin A Kamble7e0b54b2007-09-15 10:35:36 +03003882 case 0x8d: /* lea r16/r32, m */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003883 ctxt->dst.val = ctxt->src.addr.mem.ea;
Nitin A Kamble7e0b54b2007-09-15 10:35:36 +03003884 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08003885 case 0x8f: /* pop (sole member of Grp1a) */
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003886 rc = em_grp1a(ctxt);
Avi Kivity6aa8b732006-12-10 02:21:36 -08003887 break;
Avi Kivity3d9e77d2010-08-01 12:41:59 +03003888 case 0x90 ... 0x97: /* nop / xchg reg, rax */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003889 if (ctxt->dst.addr.reg == &ctxt->regs[VCPU_REGS_RAX])
Mohammed Gamal34698d82010-08-04 14:41:04 +03003890 break;
Takuya Yoshikawae4f973a2011-05-29 21:59:09 +09003891 rc = em_xchg(ctxt);
3892 break;
Wei Yongjune8b6fa72010-08-18 16:43:13 +08003893 case 0x98: /* cbw/cwde/cdqe */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003894 switch (ctxt->op_bytes) {
3895 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break;
3896 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break;
3897 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break;
Wei Yongjune8b6fa72010-08-18 16:43:13 +08003898 }
3899 break;
Avi Kivity018a98d2007-11-27 19:30:56 +02003900 case 0xc0 ... 0xc1:
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003901 rc = em_grp2(ctxt);
Avi Kivity018a98d2007-11-27 19:30:56 +02003902 break;
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08003903 case 0xc4: /* les */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003904 rc = emulate_load_segment(ctxt, VCPU_SREG_ES);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08003905 break;
3906 case 0xc5: /* lds */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09003907 rc = emulate_load_segment(ctxt, VCPU_SREG_DS);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08003908 break;
Mohammed Gamal6e154e52010-08-04 14:38:06 +03003909 case 0xcc: /* int3 */
Takuya Yoshikawa5c5df762011-05-29 22:02:55 +09003910 rc = emulate_int(ctxt, 3);
3911 break;
Mohammed Gamal6e154e52010-08-04 14:38:06 +03003912 case 0xcd: /* int n */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003913 rc = emulate_int(ctxt, ctxt->src.val);
Mohammed Gamal6e154e52010-08-04 14:38:06 +03003914 break;
3915 case 0xce: /* into */
Takuya Yoshikawa5c5df762011-05-29 22:02:55 +09003916 if (ctxt->eflags & EFLG_OF)
3917 rc = emulate_int(ctxt, 4);
Mohammed Gamal6e154e52010-08-04 14:38:06 +03003918 break;
Avi Kivity018a98d2007-11-27 19:30:56 +02003919 case 0xd0 ... 0xd1: /* Grp2 */
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003920 rc = em_grp2(ctxt);
Avi Kivity018a98d2007-11-27 19:30:56 +02003921 break;
3922 case 0xd2 ... 0xd3: /* Grp2 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003923 ctxt->src.val = ctxt->regs[VCPU_REGS_RCX];
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003924 rc = em_grp2(ctxt);
Avi Kivity018a98d2007-11-27 19:30:56 +02003925 break;
Mohammed Gamala6a30342008-09-06 17:22:29 +03003926 case 0xe4: /* inb */
3927 case 0xe5: /* in */
Gleb Natapovcf8f70b2010-03-18 15:20:23 +02003928 goto do_io_in;
Mohammed Gamala6a30342008-09-06 17:22:29 +03003929 case 0xe6: /* outb */
3930 case 0xe7: /* out */
Gleb Natapovcf8f70b2010-03-18 15:20:23 +02003931 goto do_io_out;
Nitin A Kamble1a52e052007-09-18 16:34:25 -07003932 case 0xe8: /* call (near) */ {
Avi Kivity9dac77f2011-06-01 15:34:25 +03003933 long int rel = ctxt->src.val;
3934 ctxt->src.val = (unsigned long) ctxt->_eip;
3935 jmp_rel(ctxt, rel);
Takuya Yoshikawa4487b3b2011-04-13 00:31:23 +09003936 rc = em_push(ctxt);
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02003937 break;
Nitin A Kamble1a52e052007-09-18 16:34:25 -07003938 }
3939 case 0xe9: /* jmp rel */
Takuya Yoshikawadb5b0762011-05-29 21:56:26 +09003940 case 0xeb: /* jmp rel short */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003941 jmp_rel(ctxt, ctxt->src.val);
3942 ctxt->dst.type = OP_NONE; /* Disable writeback. */
Nitin A Kamble1a52e052007-09-18 16:34:25 -07003943 break;
Mohammed Gamala6a30342008-09-06 17:22:29 +03003944 case 0xec: /* in al,dx */
3945 case 0xed: /* in (e/r)ax,dx */
Gleb Natapovcf8f70b2010-03-18 15:20:23 +02003946 do_io_in:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003947 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val,
3948 &ctxt->dst.val))
Gleb Natapovcf8f70b2010-03-18 15:20:23 +02003949 goto done; /* IO is needed */
3950 break;
Wei Yongjunce7a0ad2010-07-06 16:50:21 +08003951 case 0xee: /* out dx,al */
3952 case 0xef: /* out dx,(e/r)ax */
Gleb Natapovcf8f70b2010-03-18 15:20:23 +02003953 do_io_out:
Avi Kivity9dac77f2011-06-01 15:34:25 +03003954 ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
3955 &ctxt->src.val, 1);
3956 ctxt->dst.type = OP_NONE; /* Disable writeback. */
Guillaume Thouvenine93f36b2008-10-28 10:51:30 +01003957 break;
Avi Kivity111de5d2007-11-27 19:14:21 +02003958 case 0xf4: /* hlt */
Avi Kivity6c3287f2011-04-20 15:43:05 +03003959 ctxt->ops->halt(ctxt);
Mohammed Gamal19fdfa02008-07-06 16:51:26 +03003960 break;
Avi Kivity111de5d2007-11-27 19:14:21 +02003961 case 0xf5: /* cmc */
3962 /* complement carry flag from eflags reg */
3963 ctxt->eflags ^= EFLG_CF;
Avi Kivity111de5d2007-11-27 19:14:21 +02003964 break;
3965 case 0xf8: /* clc */
3966 ctxt->eflags &= ~EFLG_CF;
Avi Kivity111de5d2007-11-27 19:14:21 +02003967 break;
Mohammed Gamal8744aa92010-08-05 15:42:49 +03003968 case 0xf9: /* stc */
3969 ctxt->eflags |= EFLG_CF;
3970 break;
Mohammed Gamalfb4616f2008-09-01 04:52:24 +03003971 case 0xfc: /* cld */
3972 ctxt->eflags &= ~EFLG_DF;
Mohammed Gamalfb4616f2008-09-01 04:52:24 +03003973 break;
3974 case 0xfd: /* std */
3975 ctxt->eflags |= EFLG_DF;
Mohammed Gamalfb4616f2008-09-01 04:52:24 +03003976 break;
Gleb Natapovea798492010-02-25 16:36:43 +02003977 case 0xfe: /* Grp4 */
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003978 rc = em_grp45(ctxt);
Avi Kivity018a98d2007-11-27 19:30:56 +02003979 break;
Gleb Natapovea798492010-02-25 16:36:43 +02003980 case 0xff: /* Grp5 */
Takuya Yoshikawa51187682011-05-02 02:29:17 +09003981 rc = em_grp45(ctxt);
3982 break;
Avi Kivity91269b82010-07-25 14:51:16 +03003983 default:
3984 goto cannot_emulate;
Avi Kivity6aa8b732006-12-10 02:21:36 -08003985 }
Avi Kivity018a98d2007-11-27 19:30:56 +02003986
Avi Kivity7d9ddae2010-08-30 17:12:28 +03003987 if (rc != X86EMUL_CONTINUE)
3988 goto done;
3989
Avi Kivity018a98d2007-11-27 19:30:56 +02003990writeback:
Takuya Yoshikawaadddcec2011-05-02 02:26:23 +09003991 rc = writeback(ctxt);
Takuya Yoshikawa1b30eaa2010-02-12 15:57:56 +09003992 if (rc != X86EMUL_CONTINUE)
Avi Kivity018a98d2007-11-27 19:30:56 +02003993 goto done;
3994
Gleb Natapov5cd21912010-03-18 15:20:26 +02003995 /*
3996 * restore dst type in case the decoding will be reused
3997 * (happens for string instruction )
3998 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03003999 ctxt->dst.type = saved_dst_type;
Gleb Natapov5cd21912010-03-18 15:20:26 +02004000
Avi Kivity9dac77f2011-06-01 15:34:25 +03004001 if ((ctxt->d & SrcMask) == SrcSI)
4002 string_addr_inc(ctxt, seg_override(ctxt),
4003 VCPU_REGS_RSI, &ctxt->src);
Gleb Natapova682e352010-03-18 15:20:21 +02004004
Avi Kivity9dac77f2011-06-01 15:34:25 +03004005 if ((ctxt->d & DstMask) == DstDI)
Avi Kivity90de84f2010-11-17 15:28:21 +02004006 string_addr_inc(ctxt, VCPU_SREG_ES, VCPU_REGS_RDI,
Avi Kivity9dac77f2011-06-01 15:34:25 +03004007 &ctxt->dst);
Gleb Natapovd9271122010-03-18 15:20:22 +02004008
Avi Kivity9dac77f2011-06-01 15:34:25 +03004009 if (ctxt->rep_prefix && (ctxt->d & String)) {
4010 struct read_cache *r = &ctxt->io_read;
4011 register_address_increment(ctxt, &ctxt->regs[VCPU_REGS_RCX], -1);
Gleb Natapov3e2f65d2010-08-25 12:47:42 +03004012
Gleb Natapovd2ddd1c2010-08-25 12:47:43 +03004013 if (!string_insn_completed(ctxt)) {
4014 /*
4015 * Re-enter guest when pio read ahead buffer is empty
4016 * or, if it is not used, after each 1024 iteration.
4017 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004018 if ((r->end != 0 || ctxt->regs[VCPU_REGS_RCX] & 0x3ff) &&
Gleb Natapovd2ddd1c2010-08-25 12:47:43 +03004019 (r->end == 0 || r->end != r->pos)) {
4020 /*
4021 * Reset read cache. Usually happens before
4022 * decode, but since instruction is restarted
4023 * we have to do it here.
4024 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004025 ctxt->mem_read.end = 0;
Gleb Natapovd2ddd1c2010-08-25 12:47:43 +03004026 return EMULATION_RESTART;
4027 }
4028 goto done; /* skip rip writeback */
Avi Kivity0fa6ccb2010-08-17 11:22:17 +03004029 }
Gleb Natapov5cd21912010-03-18 15:20:26 +02004030 }
Gleb Natapovd2ddd1c2010-08-25 12:47:43 +03004031
Avi Kivity9dac77f2011-06-01 15:34:25 +03004032 ctxt->eip = ctxt->_eip;
Avi Kivity018a98d2007-11-27 19:30:56 +02004033
4034done:
Avi Kivityda9cb572010-11-22 17:53:21 +02004035 if (rc == X86EMUL_PROPAGATE_FAULT)
4036 ctxt->have_exception = true;
Joerg Roedel775fde82011-04-04 12:39:24 +02004037 if (rc == X86EMUL_INTERCEPTED)
4038 return EMULATION_INTERCEPTED;
4039
Gleb Natapovd2ddd1c2010-08-25 12:47:43 +03004040 return (rc == X86EMUL_UNHANDLEABLE) ? EMULATION_FAILED : EMULATION_OK;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004041
4042twobyte_insn:
Avi Kivity9dac77f2011-06-01 15:34:25 +03004043 switch (ctxt->b) {
Avi Kivity018a98d2007-11-27 19:30:56 +02004044 case 0x09: /* wbinvd */
Clemens Nosscfb22372011-04-21 21:16:05 +02004045 (ctxt->ops->wbinvd)(ctxt);
Sheng Yangf5f48ee2010-06-30 12:25:15 +08004046 break;
4047 case 0x08: /* invd */
Avi Kivity018a98d2007-11-27 19:30:56 +02004048 case 0x0d: /* GrpP (prefetch) */
4049 case 0x18: /* Grp16 (prefetch/nop) */
Avi Kivity018a98d2007-11-27 19:30:56 +02004050 break;
4051 case 0x20: /* mov cr, reg */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004052 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
Avi Kivity018a98d2007-11-27 19:30:56 +02004053 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004054 case 0x21: /* mov from dr to reg */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004055 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val);
Avi Kivity6aa8b732006-12-10 02:21:36 -08004056 break;
Avi Kivity018a98d2007-11-27 19:30:56 +02004057 case 0x22: /* mov reg, cr */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004058 if (ops->set_cr(ctxt, ctxt->modrm_reg, ctxt->src.val)) {
Gleb Natapov54b84862010-04-28 19:15:44 +03004059 emulate_gp(ctxt, 0);
Avi Kivityda9cb572010-11-22 17:53:21 +02004060 rc = X86EMUL_PROPAGATE_FAULT;
Gleb Natapov0f122442010-04-28 19:15:31 +03004061 goto done;
4062 }
Avi Kivity9dac77f2011-06-01 15:34:25 +03004063 ctxt->dst.type = OP_NONE;
Avi Kivity018a98d2007-11-27 19:30:56 +02004064 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004065 case 0x23: /* mov from reg to dr */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004066 if (ops->set_dr(ctxt, ctxt->modrm_reg, ctxt->src.val &
Gleb Natapov338dbc92010-04-28 19:15:32 +03004067 ((ctxt->mode == X86EMUL_MODE_PROT64) ?
Avi Kivity717746e2011-04-20 13:37:53 +03004068 ~0ULL : ~0U)) < 0) {
Gleb Natapov338dbc92010-04-28 19:15:32 +03004069 /* #UD condition is already handled by the code above */
Gleb Natapov54b84862010-04-28 19:15:44 +03004070 emulate_gp(ctxt, 0);
Avi Kivityda9cb572010-11-22 17:53:21 +02004071 rc = X86EMUL_PROPAGATE_FAULT;
Gleb Natapov338dbc92010-04-28 19:15:32 +03004072 goto done;
4073 }
4074
Avi Kivity9dac77f2011-06-01 15:34:25 +03004075 ctxt->dst.type = OP_NONE; /* no writeback */
Avi Kivity6aa8b732006-12-10 02:21:36 -08004076 break;
Avi Kivity018a98d2007-11-27 19:30:56 +02004077 case 0x30:
4078 /* wrmsr */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004079 msr_data = (u32)ctxt->regs[VCPU_REGS_RAX]
4080 | ((u64)ctxt->regs[VCPU_REGS_RDX] << 32);
4081 if (ops->set_msr(ctxt, ctxt->regs[VCPU_REGS_RCX], msr_data)) {
Gleb Natapov54b84862010-04-28 19:15:44 +03004082 emulate_gp(ctxt, 0);
Avi Kivityda9cb572010-11-22 17:53:21 +02004083 rc = X86EMUL_PROPAGATE_FAULT;
Gleb Natapovfd525362010-03-18 15:20:13 +02004084 goto done;
Avi Kivity018a98d2007-11-27 19:30:56 +02004085 }
4086 rc = X86EMUL_CONTINUE;
Avi Kivity018a98d2007-11-27 19:30:56 +02004087 break;
4088 case 0x32:
4089 /* rdmsr */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004090 if (ops->get_msr(ctxt, ctxt->regs[VCPU_REGS_RCX], &msr_data)) {
Gleb Natapov54b84862010-04-28 19:15:44 +03004091 emulate_gp(ctxt, 0);
Avi Kivityda9cb572010-11-22 17:53:21 +02004092 rc = X86EMUL_PROPAGATE_FAULT;
Gleb Natapovfd525362010-03-18 15:20:13 +02004093 goto done;
Avi Kivity018a98d2007-11-27 19:30:56 +02004094 } else {
Avi Kivity9dac77f2011-06-01 15:34:25 +03004095 ctxt->regs[VCPU_REGS_RAX] = (u32)msr_data;
4096 ctxt->regs[VCPU_REGS_RDX] = msr_data >> 32;
Avi Kivity018a98d2007-11-27 19:30:56 +02004097 }
4098 rc = X86EMUL_CONTINUE;
Avi Kivity018a98d2007-11-27 19:30:56 +02004099 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004100 case 0x40 ... 0x4f: /* cmov */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004101 ctxt->dst.val = ctxt->dst.orig_val = ctxt->src.val;
4102 if (!test_cc(ctxt->b, ctxt->eflags))
4103 ctxt->dst.type = OP_NONE; /* no writeback */
Avi Kivity6aa8b732006-12-10 02:21:36 -08004104 break;
Gleb Natapovb2833e32009-04-12 13:36:30 +03004105 case 0x80 ... 0x8f: /* jnz rel, etc*/
Avi Kivity9dac77f2011-06-01 15:34:25 +03004106 if (test_cc(ctxt->b, ctxt->eflags))
4107 jmp_rel(ctxt, ctxt->src.val);
Avi Kivity018a98d2007-11-27 19:30:56 +02004108 break;
Wei Yongjunee45b582010-08-06 17:10:07 +08004109 case 0x90 ... 0x9f: /* setcc r/m8 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004110 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags);
Wei Yongjunee45b582010-08-06 17:10:07 +08004111 break;
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004112 case 0xa0: /* push fs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004113 rc = emulate_push_sreg(ctxt, VCPU_SREG_FS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004114 break;
4115 case 0xa1: /* pop fs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004116 rc = emulate_pop_sreg(ctxt, VCPU_SREG_FS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004117 break;
Nitin A Kamble7de75242007-09-15 10:13:07 +03004118 case 0xa3:
4119 bt: /* bt */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004120 ctxt->dst.type = OP_NONE;
Laurent Viviere4e03de2007-09-18 11:52:50 +02004121 /* only subword offset */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004122 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1;
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004123 emulate_2op_SrcV_nobyte(ctxt, "bt");
Nitin A Kamble7de75242007-09-15 10:13:07 +03004124 break;
Guillaume Thouvenin9bf8ea42008-12-04 14:30:13 +01004125 case 0xa4: /* shld imm8, r, r/m */
4126 case 0xa5: /* shld cl, r, r/m */
Avi Kivity761441b2011-09-07 16:41:36 +03004127 emulate_2op_cl(ctxt, "shld");
Guillaume Thouvenin9bf8ea42008-12-04 14:30:13 +01004128 break;
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004129 case 0xa8: /* push gs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004130 rc = emulate_push_sreg(ctxt, VCPU_SREG_GS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004131 break;
4132 case 0xa9: /* pop gs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004133 rc = emulate_pop_sreg(ctxt, VCPU_SREG_GS);
Mohammed Gamal0934ac92009-08-23 14:24:24 +03004134 break;
Nitin A Kamble7de75242007-09-15 10:13:07 +03004135 case 0xab:
4136 bts: /* bts */
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004137 emulate_2op_SrcV_nobyte(ctxt, "bts");
Nitin A Kamble7de75242007-09-15 10:13:07 +03004138 break;
Guillaume Thouvenin9bf8ea42008-12-04 14:30:13 +01004139 case 0xac: /* shrd imm8, r, r/m */
4140 case 0xad: /* shrd cl, r, r/m */
Avi Kivity761441b2011-09-07 16:41:36 +03004141 emulate_2op_cl(ctxt, "shrd");
Guillaume Thouvenin9bf8ea42008-12-04 14:30:13 +01004142 break;
Glauber Costa2a7c5b82008-07-10 17:08:15 -03004143 case 0xae: /* clflush */
4144 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004145 case 0xb0 ... 0xb1: /* cmpxchg */
4146 /*
4147 * Save real source value, then compare EAX against
4148 * destination.
4149 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004150 ctxt->src.orig_val = ctxt->src.val;
4151 ctxt->src.val = ctxt->regs[VCPU_REGS_RAX];
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004152 emulate_2op_SrcV(ctxt, "cmp");
Laurent Vivier05f086f2007-09-24 11:10:55 +02004153 if (ctxt->eflags & EFLG_ZF) {
Avi Kivity6aa8b732006-12-10 02:21:36 -08004154 /* Success: write back to memory. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004155 ctxt->dst.val = ctxt->src.orig_val;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004156 } else {
4157 /* Failure: write the value we saw to EAX. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004158 ctxt->dst.type = OP_REG;
4159 ctxt->dst.addr.reg = (unsigned long *)&ctxt->regs[VCPU_REGS_RAX];
Avi Kivity6aa8b732006-12-10 02:21:36 -08004160 }
4161 break;
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08004162 case 0xb2: /* lss */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004163 rc = emulate_load_segment(ctxt, VCPU_SREG_SS);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08004164 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004165 case 0xb3:
4166 btr: /* btr */
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004167 emulate_2op_SrcV_nobyte(ctxt, "btr");
Avi Kivity6aa8b732006-12-10 02:21:36 -08004168 break;
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08004169 case 0xb4: /* lfs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004170 rc = emulate_load_segment(ctxt, VCPU_SREG_FS);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08004171 break;
4172 case 0xb5: /* lgs */
Takuya Yoshikawa7b105ca2011-05-15 01:00:52 +09004173 rc = emulate_load_segment(ctxt, VCPU_SREG_GS);
Wei Yongjun09b5f4d2010-08-23 14:56:54 +08004174 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004175 case 0xb6 ... 0xb7: /* movzx */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004176 ctxt->dst.bytes = ctxt->op_bytes;
4177 ctxt->dst.val = (ctxt->d & ByteOp) ? (u8) ctxt->src.val
4178 : (u16) ctxt->src.val;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004179 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004180 case 0xba: /* Grp8 */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004181 switch (ctxt->modrm_reg & 3) {
Avi Kivity6aa8b732006-12-10 02:21:36 -08004182 case 0:
4183 goto bt;
4184 case 1:
4185 goto bts;
4186 case 2:
4187 goto btr;
4188 case 3:
4189 goto btc;
4190 }
4191 break;
Nitin A Kamble7de75242007-09-15 10:13:07 +03004192 case 0xbb:
4193 btc: /* btc */
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004194 emulate_2op_SrcV_nobyte(ctxt, "btc");
Nitin A Kamble7de75242007-09-15 10:13:07 +03004195 break;
Wei Yongjund9574a22010-08-10 13:48:22 +08004196 case 0xbc: { /* bsf */
4197 u8 zf;
4198 __asm__ ("bsf %2, %0; setz %1"
Avi Kivity9dac77f2011-06-01 15:34:25 +03004199 : "=r"(ctxt->dst.val), "=q"(zf)
4200 : "r"(ctxt->src.val));
Wei Yongjund9574a22010-08-10 13:48:22 +08004201 ctxt->eflags &= ~X86_EFLAGS_ZF;
4202 if (zf) {
4203 ctxt->eflags |= X86_EFLAGS_ZF;
Avi Kivity9dac77f2011-06-01 15:34:25 +03004204 ctxt->dst.type = OP_NONE; /* Disable writeback. */
Wei Yongjund9574a22010-08-10 13:48:22 +08004205 }
4206 break;
4207 }
4208 case 0xbd: { /* bsr */
4209 u8 zf;
4210 __asm__ ("bsr %2, %0; setz %1"
Avi Kivity9dac77f2011-06-01 15:34:25 +03004211 : "=r"(ctxt->dst.val), "=q"(zf)
4212 : "r"(ctxt->src.val));
Wei Yongjund9574a22010-08-10 13:48:22 +08004213 ctxt->eflags &= ~X86_EFLAGS_ZF;
4214 if (zf) {
4215 ctxt->eflags |= X86_EFLAGS_ZF;
Avi Kivity9dac77f2011-06-01 15:34:25 +03004216 ctxt->dst.type = OP_NONE; /* Disable writeback. */
Wei Yongjund9574a22010-08-10 13:48:22 +08004217 }
4218 break;
4219 }
Avi Kivity6aa8b732006-12-10 02:21:36 -08004220 case 0xbe ... 0xbf: /* movsx */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004221 ctxt->dst.bytes = ctxt->op_bytes;
4222 ctxt->dst.val = (ctxt->d & ByteOp) ? (s8) ctxt->src.val :
4223 (s16) ctxt->src.val;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004224 break;
Wei Yongjun92f738a2010-08-17 09:19:34 +08004225 case 0xc0 ... 0xc1: /* xadd */
Avi Kivitya31b9ce2011-09-07 16:41:35 +03004226 emulate_2op_SrcV(ctxt, "add");
Wei Yongjun92f738a2010-08-17 09:19:34 +08004227 /* Write back the register source. */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004228 ctxt->src.val = ctxt->dst.orig_val;
4229 write_register_operand(&ctxt->src);
Wei Yongjun92f738a2010-08-17 09:19:34 +08004230 break;
Sheng Yanga012e652007-10-15 14:24:20 +08004231 case 0xc3: /* movnti */
Avi Kivity9dac77f2011-06-01 15:34:25 +03004232 ctxt->dst.bytes = ctxt->op_bytes;
4233 ctxt->dst.val = (ctxt->op_bytes == 4) ? (u32) ctxt->src.val :
4234 (u64) ctxt->src.val;
Sheng Yanga012e652007-10-15 14:24:20 +08004235 break;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004236 case 0xc7: /* Grp9 (cmpxchg8b) */
Takuya Yoshikawa51187682011-05-02 02:29:17 +09004237 rc = em_grp9(ctxt);
Laurent Vivier8cdbd2c2007-09-24 11:10:54 +02004238 break;
Avi Kivity91269b82010-07-25 14:51:16 +03004239 default:
4240 goto cannot_emulate;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004241 }
Avi Kivity7d9ddae2010-08-30 17:12:28 +03004242
4243 if (rc != X86EMUL_CONTINUE)
4244 goto done;
4245
Avi Kivity6aa8b732006-12-10 02:21:36 -08004246 goto writeback;
4247
4248cannot_emulate:
Gleb Natapova0c0ab22011-03-28 16:57:49 +02004249 return EMULATION_FAILED;
Avi Kivity6aa8b732006-12-10 02:21:36 -08004250}