1 /******************************************************************************
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
6 * Copyright (c) 2005 Keir Fraser
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
11 * Copyright (C) 2006 Qumranet
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf( _f , ## _a )
29 #define DPRINTF(x...) do {} while (0)
31 #include "x86_emulate.h"
32 #include <linux/module.h>
35 * Opcode effective-address decode tables.
36 * Note that we only emulate instructions that have at least one memory
37 * operand (excluding implicit stack references). We assume that stack
38 * references and instruction fetches will never occur in special memory
39 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
/*
 * Decode-attribute bit flags combined into opcode_table/twobyte_table
 * entries below.  Bits 1-2 select the destination operand kind (DstMask),
 * bits 3-5 the source operand kind (SrcMask).
 * NOTE(review): the #defines for ModRM and Mov (and BitOp, both used by the
 * tables below) fall on lines missing from this extract.  The stray leading
 * number on every line is an extraction artifact, not part of the C code.
 */
43 /* Operand sizes: 8-bit operands or specified/overridden size. */
44 #define ByteOp (1<<0) /* 8-bit operands. */
45 /* Destination operand type. */
46 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
47 #define DstReg (2<<1) /* Register operand. */
48 #define DstMem (3<<1) /* Memory operand. */
49 #define DstMask (3<<1)
50 /* Source operand type. */
51 #define SrcNone (0<<3) /* No source operand. */
52 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
53 #define SrcReg (1<<3) /* Register operand. */
54 #define SrcMem (2<<3) /* Memory operand. */
55 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
56 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
57 #define SrcImm (5<<3) /* Immediate operand. */
58 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
59 #define SrcMask (7<<3)
60 /* Generic ModRM decode. */
62 /* Destination is only written; never read. */
/*
 * Per-opcode decode attributes for one-byte opcodes, indexed by the opcode
 * byte; an entry of 0 means the generic decoder does not handle that opcode.
 * NOTE(review): many rows and their opcode-range group-label comments fall
 * on lines missing from this extract, so visible rows do NOT line up 1:1
 * with opcode indices — consult the upstream table before editing.
 */
66 static u8 opcode_table[256] = {
68 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
69 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
72 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
73 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
76 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
77 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
80 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
81 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
84 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
85 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
86 SrcImmByte, SrcImm, 0, 0,
88 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
89 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
92 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
93 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
96 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
97 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
100 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
102 0, 0, 0, 0, 0, 0, 0, 0,
104 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
105 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
107 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
108 0, 0, 0, 0, 0, 0, 0, 0,
110 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
111 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
113 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
115 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
116 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
117 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
118 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
120 ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
121 ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
122 0, 0, 0, DstMem | SrcNone | ModRM | Mov,
124 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
126 ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
127 ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
128 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
129 ByteOp | ImplicitOps, ImplicitOps,
131 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
132 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
133 ByteOp | ImplicitOps, ImplicitOps,
135 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
137 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
138 0, ImplicitOps, 0, 0,
139 ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
141 0, 0, 0, 0, 0, 0, 0, 0,
143 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
144 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
147 0, 0, 0, 0, 0, 0, 0, 0,
149 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
153 ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
/* NOTE(review): the closing "};" of this initializer is on a missing line. */
156 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
/*
 * Decode attributes for two-byte (0x0F-prefixed) opcodes, indexed by the
 * second opcode byte; 0 means "not handled by the generic decoder".
 * NOTE(review): several rows and their group-label comments fall on lines
 * missing from this extract, so visible rows do not map 1:1 to indices.
 */
159 static u16 twobyte_table[256] = {
161 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
162 0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
164 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
166 ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
167 0, 0, 0, 0, 0, 0, 0, 0,
169 ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
171 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
172 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
173 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
174 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
176 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
177 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
178 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
179 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
181 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
183 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
185 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
187 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
189 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
191 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
193 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
195 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
196 DstMem | SrcReg | ModRM | BitOp,
197 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
198 DstReg | SrcMem16 | ModRM | Mov,
200 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
201 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
202 DstReg | SrcMem16 | ModRM | Mov,
204 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
206 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
208 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/* NOTE(review): the closing "};" of this initializer is on a missing line. */
210 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
214 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
215 * are interested only in invlpg and not in any of the rest.
217 * invlpg is a special instruction in that the data it references may not
220 void kvm_emulator_want_group7_invlpg(void)
/* Clear SrcMem from twobyte_table entry 1 (the 0x0F 0x01 / Group 7 slot) so
 * the generic decoder no longer fetches a memory source operand for those
 * instructions.  NOTE(review): the function's opening/closing braces fall
 * on lines missing from this extract. */
222 twobyte_table[1] &= ~SrcMem;
224 EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
226 /* Type, address-of, and value of an instruction's operand. */
/* NOTE(review): the "struct operand { ... };" wrapper lines are missing
 * from this extract; the 'type' and value/pointer fields below are members
 * of that struct (a 'bytes' field referenced elsewhere is also missing). */
228 enum { OP_REG, OP_MEM, OP_IMM } type;
230 unsigned long val, orig_val, *ptr;
233 /* EFLAGS bit definitions. */
234 #define EFLG_OF (1<<11)
235 #define EFLG_DF (1<<10)
236 #define EFLG_SF (1<<7)
237 #define EFLG_ZF (1<<6)
238 #define EFLG_AF (1<<4)
239 #define EFLG_PF (1<<2)
240 #define EFLG_CF (1<<0)
/*
 * Inline-assembly emulation helpers.  Each emulate_* macro executes the
 * real host instruction (_op) on the shadow operands and captures the
 * resulting EFLAGS via _PRE_EFLAGS/_POST_EFLAGS.
 * NOTE(review): these are backslash-continued macros and several of their
 * continuation lines are missing from this extract, so no comments are
 * inserted inside macro bodies; every visible line is kept byte-identical.
 */
243 * Instruction emulation:
244 * Most instructions are emulated directly via a fragment of inline assembly
245 * code. This allows us to save/restore EFLAGS and thus very easily pick up
246 * any modified flags.
249 #if defined(CONFIG_X86_64)
250 #define _LO32 "k" /* force 32-bit operand */
251 #define _STK "%%rsp" /* stack pointer */
252 #elif defined(__i386__)
253 #define _LO32 "" /* force 32-bit operand */
254 #define _STK "%%esp" /* stack pointer */
258 * These EFLAGS bits are restored from saved value during emulation, and
259 * any changes are written back to the saved value after emulation.
261 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
263 /* Before executing instruction: restore necessary bits in EFLAGS. */
264 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
265 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
267 "movl %"_msk",%"_LO32 _tmp"; " \
268 "andl %"_LO32 _tmp",("_STK"); " \
270 "notl %"_LO32 _tmp"; " \
271 "andl %"_LO32 _tmp",("_STK"); " \
273 "orl %"_LO32 _tmp",("_STK"); " \
275 /* _sav &= ~msk; */ \
276 "movl %"_msk",%"_LO32 _tmp"; " \
277 "notl %"_LO32 _tmp"; " \
278 "andl %"_LO32 _tmp",%"_sav"; "
280 /* After executing instruction: write-back necessary bits in EFLAGS. */
281 #define _POST_EFLAGS(_sav, _msk, _tmp) \
282 /* _sav |= EFLAGS & _msk; */ \
285 "andl %"_msk",%"_LO32 _tmp"; " \
286 "orl %"_LO32 _tmp",%"_sav"; "
288 /* Raw emulation: instruction has two explicit operands. */
289 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
291 unsigned long _tmp; \
293 switch ((_dst).bytes) { \
295 __asm__ __volatile__ ( \
296 _PRE_EFLAGS("0","4","2") \
297 _op"w %"_wx"3,%1; " \
298 _POST_EFLAGS("0","4","2") \
299 : "=m" (_eflags), "=m" ((_dst).val), \
301 : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
304 __asm__ __volatile__ ( \
305 _PRE_EFLAGS("0","4","2") \
306 _op"l %"_lx"3,%1; " \
307 _POST_EFLAGS("0","4","2") \
308 : "=m" (_eflags), "=m" ((_dst).val), \
310 : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
313 __emulate_2op_8byte(_op, _src, _dst, \
314 _eflags, _qx, _qy); \
319 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
321 unsigned long _tmp; \
322 switch ( (_dst).bytes ) \
325 __asm__ __volatile__ ( \
326 _PRE_EFLAGS("0","4","2") \
327 _op"b %"_bx"3,%1; " \
328 _POST_EFLAGS("0","4","2") \
329 : "=m" (_eflags), "=m" ((_dst).val), \
331 : _by ((_src).val), "i" (EFLAGS_MASK) ); \
334 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
335 _wx, _wy, _lx, _ly, _qx, _qy); \
340 /* Source operand is byte-sized and may be restricted to just %cl. */
341 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
342 __emulate_2op(_op, _src, _dst, _eflags, \
343 "b", "c", "b", "c", "b", "c", "b", "c")
345 /* Source operand is byte, word, long or quad sized. */
346 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
347 __emulate_2op(_op, _src, _dst, _eflags, \
348 "b", "q", "w", "r", _LO32, "r", "", "r")
350 /* Source operand is word, long or quad sized. */
351 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
352 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
353 "w", "r", _LO32, "r", "", "r")
355 /* Instruction has only one explicit operand (no source operand). */
356 #define emulate_1op(_op, _dst, _eflags) \
358 unsigned long _tmp; \
360 switch ( (_dst).bytes ) \
363 __asm__ __volatile__ ( \
364 _PRE_EFLAGS("0","3","2") \
366 _POST_EFLAGS("0","3","2") \
367 : "=m" (_eflags), "=m" ((_dst).val), \
369 : "i" (EFLAGS_MASK) ); \
372 __asm__ __volatile__ ( \
373 _PRE_EFLAGS("0","3","2") \
375 _POST_EFLAGS("0","3","2") \
376 : "=m" (_eflags), "=m" ((_dst).val), \
378 : "i" (EFLAGS_MASK) ); \
381 __asm__ __volatile__ ( \
382 _PRE_EFLAGS("0","3","2") \
384 _POST_EFLAGS("0","3","2") \
385 : "=m" (_eflags), "=m" ((_dst).val), \
387 : "i" (EFLAGS_MASK) ); \
390 __emulate_1op_8byte(_op, _dst, _eflags); \
395 /* Emulate an instruction with quadword operands (x86/64 only). */
396 #if defined(CONFIG_X86_64)
397 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
399 __asm__ __volatile__ ( \
400 _PRE_EFLAGS("0","4","2") \
401 _op"q %"_qx"3,%1; " \
402 _POST_EFLAGS("0","4","2") \
403 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
404 : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
407 #define __emulate_1op_8byte(_op, _dst, _eflags) \
409 __asm__ __volatile__ ( \
410 _PRE_EFLAGS("0","3","2") \
412 _POST_EFLAGS("0","3","2") \
413 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
414 : "i" (EFLAGS_MASK) ); \
417 #elif defined(__i386__)
418 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
419 #define __emulate_1op_8byte(_op, _dst, _eflags)
420 #endif /* __i386__ */
/* insn_fetch reads the next _size instruction bytes at cs_base + _eip via
 * ops->read_std; the address_mask/register_address(_increment) helpers
 * truncate register-held addresses to the current address size (ad_bytes). */
422 /* Fetch next part of the instruction being emulated. */
423 #define insn_fetch(_type, _size, _eip) \
424 ({ unsigned long _x; \
425 rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
426 (_size), ctxt->vcpu); \
433 /* Access/update address held in a register, based on addressing mode. */
434 #define address_mask(reg) \
435 ((ad_bytes == sizeof(unsigned long)) ? \
436 (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
437 #define register_address(base, reg) \
438 ((base) + address_mask(reg))
439 #define register_address_increment(reg, inc) \
441 /* signed type ensures sign extension to long */ \
443 if ( ad_bytes == sizeof(unsigned long) ) \
446 (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
447 (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
451 * Given the 'reg' portion of a ModRM byte, and a register block, return a
452 * pointer into the block that addresses the relevant register.
453 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
455 static void *decode_register(u8 modrm_reg, unsigned long *regs,
460 p = ®s[modrm_reg];
461 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
462 p = (unsigned char *)®s[modrm_reg & 3] + 1;
/*
 * Read a descriptor-table operand (as used by lgdt/lidt): the 16-bit limit
 * at 'ptr' and the op_bytes-wide base address at 'ptr + 2', both fetched
 * through the ops->read_std callback.
 * NOTE(review): the parameter line declaring 'ptr', the braces and the
 * error-check/return lines fall on lines missing from this extract.
 */
466 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
467 struct x86_emulate_ops *ops,
469 u16 *size, unsigned long *address, int op_bytes)
476 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
480 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
/*
 * x86_emulate_memop() - decode and emulate one memory-touching guest
 * instruction.
 *
 * Works on a shadow copy of the guest register file (_regs) plus shadow
 * _eip/_eflags; that state is committed back to ctxt->vcpu only on the
 * success path near the end.  All guest memory and I/O is reached through
 * the 'ops' callback table (read_std/write_std/read_emulated/
 * write_emulated/cmpxchg_emulated).
 *
 * NOTE(review): a large number of lines (return type, braces, most case
 * labels, the done:/cannot_emulate: labels, etc.) are missing from this
 * extract; the fragments below are therefore kept byte-identical rather
 * than restructured.
 */
486 x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
489 u8 b, sib, twobyte = 0, rex_prefix = 0;
490 u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
491 unsigned long *override_base = NULL;
492 unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
494 struct operand src, dst;
495 unsigned long cr2 = ctxt->cr2;
496 int mode = ctxt->mode;
497 unsigned long modrm_ea;
498 int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
502 /* Shadow copy of register state. Committed on successful emulation. */
503 unsigned long _regs[NR_VCPU_REGS];
504 unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
505 unsigned long modrm_val = 0;
507 memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
/* Default operand/address sizes follow the execution mode (switch line
 * missing from extract). */
510 case X86EMUL_MODE_REAL:
511 case X86EMUL_MODE_PROT16:
512 op_bytes = ad_bytes = 2;
514 case X86EMUL_MODE_PROT32:
515 op_bytes = ad_bytes = 4;
518 case X86EMUL_MODE_PROT64:
527 /* Legacy prefixes. */
528 for (i = 0; i < 8; i++) {
529 switch (b = insn_fetch(u8, 1, _eip)) {
530 case 0x66: /* operand-size override */
531 op_bytes ^= 6; /* switch between 2/4 bytes */
533 case 0x67: /* address-size override */
534 if (mode == X86EMUL_MODE_PROT64)
535 ad_bytes ^= 12; /* switch between 4/8 bytes */
537 ad_bytes ^= 6; /* switch between 2/4 bytes */
539 case 0x2e: /* CS override */
540 override_base = &ctxt->cs_base;
542 case 0x3e: /* DS override */
543 override_base = &ctxt->ds_base;
545 case 0x26: /* ES override */
546 override_base = &ctxt->es_base;
548 case 0x64: /* FS override */
549 override_base = &ctxt->fs_base;
551 case 0x65: /* GS override */
552 override_base = &ctxt->gs_base;
554 case 0x36: /* SS override */
555 override_base = &ctxt->ss_base;
557 case 0xf0: /* LOCK */
560 case 0xf3: /* REP/REPE/REPZ */
563 case 0xf2: /* REPNE/REPNZ */
/* REX prefix (64-bit mode only): REX.W/R/X/B widen the operand size and
 * extend the ModRM/SIB register fields. */
573 if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
576 op_bytes = 8; /* REX.W */
577 modrm_reg = (b & 4) << 1; /* REX.R */
578 index_reg = (b & 2) << 2; /* REX.X */
579 modrm_rm = base_reg = (b & 1) << 3; /* REG.B */
580 b = insn_fetch(u8, 1, _eip);
583 /* Opcode byte(s). */
586 /* Two-byte opcode? */
589 b = insn_fetch(u8, 1, _eip);
590 d = twobyte_table[b];
598 /* ModRM and SIB bytes. */
600 modrm = insn_fetch(u8, 1, _eip);
601 modrm_mod |= (modrm & 0xc0) >> 6;
602 modrm_reg |= (modrm & 0x38) >> 3;
603 modrm_rm |= (modrm & 0x07);
607 if (modrm_mod == 3) {
608 modrm_val = *(unsigned long *)
609 decode_register(modrm_rm, _regs, d & ByteOp);
614 unsigned bx = _regs[VCPU_REGS_RBX];
615 unsigned bp = _regs[VCPU_REGS_RBP];
616 unsigned si = _regs[VCPU_REGS_RSI];
617 unsigned di = _regs[VCPU_REGS_RDI];
619 /* 16-bit ModR/M decode. */
623 modrm_ea += insn_fetch(u16, 2, _eip);
626 modrm_ea += insn_fetch(s8, 1, _eip);
629 modrm_ea += insn_fetch(u16, 2, _eip);
659 if (modrm_rm == 2 || modrm_rm == 3 ||
660 (modrm_rm == 6 && modrm_mod != 0))
662 override_base = &ctxt->ss_base;
663 modrm_ea = (u16)modrm_ea;
665 /* 32/64-bit ModR/M decode. */
669 sib = insn_fetch(u8, 1, _eip);
670 index_reg |= (sib >> 3) & 7;
677 modrm_ea += _regs[base_reg];
679 modrm_ea += insn_fetch(s32, 4, _eip);
682 modrm_ea += _regs[base_reg];
688 modrm_ea += _regs[index_reg] << scale;
694 modrm_ea += _regs[modrm_rm];
695 else if (mode == X86EMUL_MODE_PROT64)
699 modrm_ea += _regs[modrm_rm];
705 modrm_ea += insn_fetch(s32, 4, _eip);
708 modrm_ea += insn_fetch(s8, 1, _eip);
711 modrm_ea += insn_fetch(s32, 4, _eip);
716 override_base = &ctxt->ds_base;
717 if (mode == X86EMUL_MODE_PROT64 &&
718 override_base != &ctxt->fs_base &&
719 override_base != &ctxt->gs_base)
720 override_base = NULL;
723 modrm_ea += *override_base;
727 switch (d & SrcMask) {
738 modrm_ea += op_bytes;
742 modrm_ea = (u32)modrm_ea;
749 * Decode and fetch the source operand: register, memory
752 switch (d & SrcMask) {
758 src.ptr = decode_register(modrm_reg, _regs,
760 src.val = src.orig_val = *(u8 *) src.ptr;
763 src.ptr = decode_register(modrm_reg, _regs, 0);
764 switch ((src.bytes = op_bytes)) {
766 src.val = src.orig_val = *(u16 *) src.ptr;
769 src.val = src.orig_val = *(u32 *) src.ptr;
772 src.val = src.orig_val = *(u64 *) src.ptr;
784 src.bytes = (d & ByteOp) ? 1 : op_bytes;
787 src.ptr = (unsigned long *)cr2;
788 if ((rc = ops->read_emulated((unsigned long)src.ptr,
789 &src.val, src.bytes, ctxt->vcpu)) != 0)
791 src.orig_val = src.val;
795 src.ptr = (unsigned long *)_eip;
796 src.bytes = (d & ByteOp) ? 1 : op_bytes;
799 /* NB. Immediates are sign-extended as necessary. */
802 src.val = insn_fetch(s8, 1, _eip);
805 src.val = insn_fetch(s16, 2, _eip);
808 src.val = insn_fetch(s32, 4, _eip);
814 src.ptr = (unsigned long *)_eip;
816 src.val = insn_fetch(s8, 1, _eip);
820 /* Decode and fetch the destination operand: register or memory. */
821 switch (d & DstMask) {
823 /* Special instructions do their own operand decoding. */
828 && !(twobyte && (b == 0xb6 || b == 0xb7))) {
829 dst.ptr = decode_register(modrm_reg, _regs,
831 dst.val = *(u8 *) dst.ptr;
834 dst.ptr = decode_register(modrm_reg, _regs, 0);
835 switch ((dst.bytes = op_bytes)) {
837 dst.val = *(u16 *)dst.ptr;
840 dst.val = *(u32 *)dst.ptr;
843 dst.val = *(u64 *)dst.ptr;
850 dst.ptr = (unsigned long *)cr2;
851 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
853 unsigned long mask = ~(dst.bytes * 8 - 1);
855 dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
857 if (!(d & Mov) && /* optimisation - avoid slow emulated read */
858 ((rc = ops->read_emulated((unsigned long)dst.ptr,
859 &dst.val, dst.bytes, ctxt->vcpu)) != 0))
863 dst.orig_val = dst.val;
/* NOTE(review): the switch(b) line and most case labels for the one-byte
 * opcode dispatch (add/or/adc/sbb/and/... per opcode row) are on lines
 * missing from this extract. */
871 emulate_2op_SrcV("add", src, dst, _eflags);
875 emulate_2op_SrcV("or", src, dst, _eflags);
879 emulate_2op_SrcV("adc", src, dst, _eflags);
883 emulate_2op_SrcV("sbb", src, dst, _eflags);
887 emulate_2op_SrcV("and", src, dst, _eflags);
889 case 0x24: /* and al imm8 */
891 dst.ptr = &_regs[VCPU_REGS_RAX];
892 dst.val = *(u8 *)dst.ptr;
894 dst.orig_val = dst.val;
896 case 0x25: /* and ax imm16, or eax imm32 */
898 dst.bytes = op_bytes;
899 dst.ptr = &_regs[VCPU_REGS_RAX];
901 dst.val = *(u16 *)dst.ptr;
903 dst.val = *(u32 *)dst.ptr;
904 dst.orig_val = dst.val;
908 emulate_2op_SrcV("sub", src, dst, _eflags);
912 emulate_2op_SrcV("xor", src, dst, _eflags);
916 emulate_2op_SrcV("cmp", src, dst, _eflags);
918 case 0x63: /* movsxd */
919 if (mode != X86EMUL_MODE_PROT64)
921 dst.val = (s32) src.val;
923 case 0x80 ... 0x83: /* Grp1 */
945 emulate_2op_SrcV("test", src, dst, _eflags);
947 case 0x86 ... 0x87: /* xchg */
948 /* Write back the register source. */
951 *(u8 *) src.ptr = (u8) dst.val;
954 *(u16 *) src.ptr = (u16) dst.val;
957 *src.ptr = (u32) dst.val;
958 break; /* 64b reg: zero-extend */
964 * Write back the memory destination with implicit LOCK
970 case 0xa0 ... 0xa1: /* mov */
971 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
973 _eip += ad_bytes; /* skip src displacement */
975 case 0xa2 ... 0xa3: /* mov */
976 dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
977 _eip += ad_bytes; /* skip dst displacement */
979 case 0x88 ... 0x8b: /* mov */
980 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
983 case 0x8f: /* pop (sole member of Grp1a) */
984 /* 64-bit mode: POP always pops a 64-bit operand. */
985 if (mode == X86EMUL_MODE_PROT64)
987 if ((rc = ops->read_std(register_address(ctxt->ss_base,
988 _regs[VCPU_REGS_RSP]),
989 &dst.val, dst.bytes, ctxt->vcpu)) != 0)
991 register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
997 emulate_2op_SrcB("rol", src, dst, _eflags);
1000 emulate_2op_SrcB("ror", src, dst, _eflags);
1003 emulate_2op_SrcB("rcl", src, dst, _eflags);
1006 emulate_2op_SrcB("rcr", src, dst, _eflags);
1008 case 4: /* sal/shl */
1009 case 6: /* sal/shl */
1010 emulate_2op_SrcB("sal", src, dst, _eflags);
1013 emulate_2op_SrcB("shr", src, dst, _eflags);
1016 emulate_2op_SrcB("sar", src, dst, _eflags);
1020 case 0xd0 ... 0xd1: /* Grp2 */
1023 case 0xd2 ... 0xd3: /* Grp2 */
1024 src.val = _regs[VCPU_REGS_RCX];
1026 case 0xf6 ... 0xf7: /* Grp3 */
1027 switch (modrm_reg) {
1028 case 0 ... 1: /* test */
1030 * Special case in Grp3: test has an immediate
1034 src.ptr = (unsigned long *)_eip;
1035 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1038 switch (src.bytes) {
1040 src.val = insn_fetch(s8, 1, _eip);
1043 src.val = insn_fetch(s16, 2, _eip);
1046 src.val = insn_fetch(s32, 4, _eip);
1054 emulate_1op("neg", dst, _eflags);
1057 goto cannot_emulate;
1060 case 0xfe ... 0xff: /* Grp4/Grp5 */
1061 switch (modrm_reg) {
1063 emulate_1op("inc", dst, _eflags);
1066 emulate_1op("dec", dst, _eflags);
1069 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1070 if (mode == X86EMUL_MODE_PROT64) {
1072 if ((rc = ops->read_std((unsigned long)dst.ptr,
1077 register_address_increment(_regs[VCPU_REGS_RSP],
1079 if ((rc = ops->write_std(
1080 register_address(ctxt->ss_base,
1081 _regs[VCPU_REGS_RSP]),
1082 &dst.val, dst.bytes, ctxt->vcpu)) != 0)
1087 goto cannot_emulate;
/* Generic write-back of the decoded destination operand (register or
 * emulated memory, the latter via write_emulated or, under LOCK,
 * cmpxchg_emulated). */
1096 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1097 switch (dst.bytes) {
1099 *(u8 *)dst.ptr = (u8)dst.val;
1102 *(u16 *)dst.ptr = (u16)dst.val;
1105 *dst.ptr = (u32)dst.val;
1106 break; /* 64b: zero-ext */
1114 rc = ops->cmpxchg_emulated((unsigned long)dst.
1116 &dst.val, dst.bytes,
1119 rc = ops->write_emulated((unsigned long)dst.ptr,
1120 &dst.val, dst.bytes,
1129 /* Commit shadow register state. */
1130 memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
1131 ctxt->eflags = _eflags;
1132 ctxt->vcpu->rip = _eip;
1135 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1139 goto twobyte_special_insn;
/* Instructions with fully custom handling: string I/O, string moves,
 * hlt, ret, pop. */
1141 case 0x6c: /* insb */
1142 case 0x6d: /* insw/insd */
1143 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1145 (d & ByteOp) ? 1 : op_bytes, /* size */
1147 address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
1148 (_eflags & EFLG_DF), /* down */
1149 register_address(ctxt->es_base,
1150 _regs[VCPU_REGS_RDI]), /* address */
1152 _regs[VCPU_REGS_RDX] /* port */
1156 case 0x6e: /* outsb */
1157 case 0x6f: /* outsw/outsd */
1158 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1160 (d & ByteOp) ? 1 : op_bytes, /* size */
1162 address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
1163 (_eflags & EFLG_DF), /* down */
1164 register_address(override_base ?
1165 *override_base : ctxt->ds_base,
1166 _regs[VCPU_REGS_RSI]), /* address */
1168 _regs[VCPU_REGS_RDX] /* port */
1174 if (_regs[VCPU_REGS_RCX] == 0) {
1175 ctxt->vcpu->rip = _eip;
1178 _regs[VCPU_REGS_RCX]--;
1179 _eip = ctxt->vcpu->rip;
1182 case 0xa4 ... 0xa5: /* movs */
1184 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1185 dst.ptr = (unsigned long *)register_address(ctxt->es_base,
1186 _regs[VCPU_REGS_RDI]);
1187 if ((rc = ops->read_emulated(register_address(
1188 override_base ? *override_base : ctxt->ds_base,
1189 _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt->vcpu)) != 0)
1191 register_address_increment(_regs[VCPU_REGS_RSI],
1192 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1193 register_address_increment(_regs[VCPU_REGS_RDI],
1194 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1196 case 0xa6 ... 0xa7: /* cmps */
1197 DPRINTF("Urk! I don't handle CMPS.\n");
1198 goto cannot_emulate;
1199 case 0xaa ... 0xab: /* stos */
1201 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1202 dst.ptr = (unsigned long *)cr2;
1203 dst.val = _regs[VCPU_REGS_RAX];
1204 register_address_increment(_regs[VCPU_REGS_RDI],
1205 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1207 case 0xac ... 0xad: /* lods */
1209 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1210 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1211 if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
1214 register_address_increment(_regs[VCPU_REGS_RSI],
1215 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1217 case 0xae ... 0xaf: /* scas */
1218 DPRINTF("Urk! I don't handle SCAS.\n");
1219 goto cannot_emulate;
1220 case 0xf4: /* hlt */
1221 ctxt->vcpu->halt_request = 1;
1223 case 0xc3: /* ret */
1225 goto pop_instruction;
1226 case 0x58 ... 0x5f: /* pop reg */
1227 dst.ptr = (unsigned long *)&_regs[b & 0x7];
1230 if ((rc = ops->read_std(register_address(ctxt->ss_base,
1231 _regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
1235 register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
1236 no_wb = 1; /* Disable writeback. */
/* NOTE(review): two-byte (0x0F xx) opcode emulation follows; the
 * 'twobyte_insn:' label and its switch(b) line are on missing lines. */
1243 case 0x01: /* lgdt, lidt, lmsw */
1244 /* Disable writeback. */
1246 switch (modrm_reg) {
1248 unsigned long address;
1251 rc = read_descriptor(ctxt, ops, src.ptr,
1252 &size, &address, op_bytes);
1255 realmode_lgdt(ctxt->vcpu, size, address);
1258 rc = read_descriptor(ctxt, ops, src.ptr,
1259 &size, &address, op_bytes);
1262 realmode_lidt(ctxt->vcpu, size, address);
1266 goto cannot_emulate;
1267 *(u16 *)&_regs[modrm_rm]
1268 = realmode_get_cr(ctxt->vcpu, 0);
1272 goto cannot_emulate;
1273 realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
1276 emulate_invlpg(ctxt->vcpu, cr2);
1279 goto cannot_emulate;
1282 case 0x21: /* mov from dr to reg */
1285 goto cannot_emulate;
1286 rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
1288 case 0x23: /* mov from reg to dr */
1291 goto cannot_emulate;
1292 rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
1294 case 0x40 ... 0x4f: /* cmov */
1295 dst.val = dst.orig_val = src.val;
1298 * First, assume we're decoding an even cmov opcode
1301 switch ((b & 15) >> 1) {
1303 no_wb = (_eflags & EFLG_OF) ? 0 : 1;
1305 case 1: /* cmovb/cmovc/cmovnae */
1306 no_wb = (_eflags & EFLG_CF) ? 0 : 1;
1308 case 2: /* cmovz/cmove */
1309 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1311 case 3: /* cmovbe/cmovna */
1312 no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
1315 no_wb = (_eflags & EFLG_SF) ? 0 : 1;
1317 case 5: /* cmovp/cmovpe */
1318 no_wb = (_eflags & EFLG_PF) ? 0 : 1;
1320 case 7: /* cmovle/cmovng */
1321 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1323 case 6: /* cmovl/cmovnge */
1324 no_wb &= (!(_eflags & EFLG_SF) !=
1325 !(_eflags & EFLG_OF)) ? 0 : 1;
1328 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1331 case 0xb0 ... 0xb1: /* cmpxchg */
1333 * Save real source value, then compare EAX against
1336 src.orig_val = src.val;
1337 src.val = _regs[VCPU_REGS_RAX];
1338 emulate_2op_SrcV("cmp", src, dst, _eflags);
1339 if (_eflags & EFLG_ZF) {
1340 /* Success: write back to memory. */
1341 dst.val = src.orig_val;
1343 /* Failure: write the value we saw to EAX. */
1345 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1350 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1351 emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
1355 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1356 emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
1360 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1361 emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
1363 case 0xb6 ... 0xb7: /* movzx */
1364 dst.bytes = op_bytes;
1365 dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
1369 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1370 emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
1372 case 0xba: /* Grp8 */
1373 switch (modrm_reg & 3) {
1384 case 0xbe ... 0xbf: /* movsx */
1385 dst.bytes = op_bytes;
1386 dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
1391 twobyte_special_insn:
1392 /* Disable writeback. */
1395 case 0x09: /* wbinvd */
1397 case 0x0d: /* GrpP (prefetch) */
1398 case 0x18: /* Grp16 (prefetch/nop) */
1401 emulate_clts(ctxt->vcpu);
1403 case 0x20: /* mov cr, reg */
1405 goto cannot_emulate;
1406 _regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
1408 case 0x22: /* mov reg, cr */
1410 goto cannot_emulate;
1411 realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
1415 msr_data = (u32)_regs[VCPU_REGS_RAX]
1416 | ((u64)_regs[VCPU_REGS_RDX] << 32);
1417 rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
1419 kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
1420 _eip = ctxt->vcpu->rip;
1422 rc = X86EMUL_CONTINUE;
1426 rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
1428 kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
1429 _eip = ctxt->vcpu->rip;
1431 _regs[VCPU_REGS_RAX] = (u32)msr_data;
1432 _regs[VCPU_REGS_RDX] = msr_data >> 32;
1434 rc = X86EMUL_CONTINUE;
1436 case 0xc7: /* Grp9 (cmpxchg8b) */
1439 if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
1442 if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
1443 ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
1444 _regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1445 _regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1446 _eflags &= ~EFLG_ZF;
1448 new = ((u64)_regs[VCPU_REGS_RCX] << 32)
1449 | (u32) _regs[VCPU_REGS_RBX];
1450 if ((rc = ops->cmpxchg_emulated(cr2, &old,
1451 &new, 8, ctxt->vcpu)) != 0)
1461 DPRINTF("Cannot emulate %02x\n", b);
1468 #include <asm/uaccess.h>
/*
 * Read 'bytes' from the guest's linear address space via copy_from_user.
 * On a partial copy, report a read page fault at the first un-copied
 * address (addr + bytes - rc) and return X86EMUL_PROPAGATE_FAULT;
 * otherwise return X86EMUL_CONTINUE.
 * NOTE(review): the return-type line, the 'val' destination parameter and
 * the declaration of 'rc' fall on lines missing from this extract.
 */
1471 x86_emulate_read_std(unsigned long addr,
1473 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1479 if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
1480 propagate_page_fault(addr + bytes - rc, 0); /* read fault */
1481 return X86EMUL_PROPAGATE_FAULT;
1484 return X86EMUL_CONTINUE;
/*
 * Write 'bytes' to the guest's linear address space via copy_to_user.
 * On a partial copy, report a write page fault (PGERR_write_access) at the
 * first un-copied address and return X86EMUL_PROPAGATE_FAULT; otherwise
 * return X86EMUL_CONTINUE.
 * NOTE(review): the return-type line, the 'val' source parameter and the
 * declaration of 'rc' fall on lines missing from this extract.
 */
1488 x86_emulate_write_std(unsigned long addr,
1490 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1494 if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
1495 propagate_page_fault(addr + bytes - rc, PGERR_write_access);
1496 return X86EMUL_PROPAGATE_FAULT;
1499 return X86EMUL_CONTINUE;