1 /******************************************************************************
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
6 * Copyright (c) 2005 Keir Fraser
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
11 * Copyright (C) 2006 Qumranet
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf(_f , ## _a)
30 #define DPRINTF(x...) do {} while (0)
32 #include "x86_emulate.h"
33 #include <linux/module.h>
36 * Opcode effective-address decode tables.
37 * Note that we only emulate instructions that have at least one memory
38 * operand (excluding implicit stack references). We assume that stack
39 * references and instruction fetches will never occur in special memory
40 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
/*
 * Per-opcode decode flags, OR'd together in opcode_table[]/twobyte_table[].
 * Bits 1-2 select the destination operand kind, bits 3-5 the source kind.
 * NOTE(review): the defines for ModRM, Mov and BitOp (bits 6-8) sit on lines
 * elided from this extraction; they are referenced by the tables below.
 */
44 /* Operand sizes: 8-bit operands or specified/overridden size. */
45 #define ByteOp (1<<0) /* 8-bit operands. */
46 /* Destination operand type. */
47 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
48 #define DstReg (2<<1) /* Register operand. */
49 #define DstMem (3<<1) /* Memory operand. */
50 #define DstMask (3<<1)
51 /* Source operand type. */
52 #define SrcNone (0<<3) /* No source operand. */
53 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
54 #define SrcReg (1<<3) /* Register operand. */
55 #define SrcMem (2<<3) /* Memory operand. */
56 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
57 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
58 #define SrcImm (5<<3) /* Immediate operand. */
59 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
60 #define SrcMask (7<<3)
61 /* Generic ModRM decode. */
63 /* Destination is only written; never read. */
66 #define MemAbs (1<<9) /* Memory operand is absolute displacement */
67 #define String (1<<10) /* String instruction (rep capable) */
68 #define Stack (1<<11) /* Stack instruction (push/pop) */
/*
 * Decode flags for each one-byte opcode (0x00-0xFF); a zero entry means the
 * opcode is not emulated. NOTE(review): the per-row "0xNN - 0xMM" comments
 * present in the original file were elided by this extraction, as was the
 * closing "};" line — code below is kept byte-for-byte.
 */
70 static u16 opcode_table[256] = {
72 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
73 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
76 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
77 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
80 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
81 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
84 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
85 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
88 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
89 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
90 SrcImmByte, SrcImm, 0, 0,
92 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
93 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
96 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
97 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
100 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
101 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
104 DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
106 DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
108 SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
109 SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
111 DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
112 DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
114 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
117 0, 0, ImplicitOps | Mov | Stack, 0,
118 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
119 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
121 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
122 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
124 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
125 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
127 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
128 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
129 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
130 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
132 ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
133 ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
134 0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov | Stack,
136 0, 0, 0, 0, 0, 0, 0, 0,
137 0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
139 ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
140 ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
141 ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
142 ByteOp | ImplicitOps | String, ImplicitOps | String,
144 0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
145 ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
146 ByteOp | ImplicitOps | String, ImplicitOps | String,
148 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
150 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
151 0, ImplicitOps | Stack, 0, 0,
152 ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
154 0, 0, 0, 0, 0, 0, 0, 0,
156 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
157 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
160 0, 0, 0, 0, 0, 0, 0, 0,
162 0, 0, 0, 0, 0, 0, 0, 0,
164 ImplicitOps | Stack, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps,
168 ImplicitOps, ImplicitOps,
169 ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
171 ImplicitOps, 0, ImplicitOps, ImplicitOps,
172 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
/*
 * Decode flags for each two-byte (0x0F-prefixed) opcode; zero means not
 * emulated. NOTE(review): per-row opcode-range comments and the closing
 * "};" were elided by this extraction — code below is kept byte-for-byte.
 */
175 static u16 twobyte_table[256] = {
177 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
178 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
180 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
182 ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
183 0, 0, 0, 0, 0, 0, 0, 0,
185 ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
187 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
188 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
189 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
190 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
192 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
193 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
194 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
195 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
197 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
199 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
201 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
203 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
204 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
205 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
206 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
208 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
210 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
212 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
214 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
215 DstMem | SrcReg | ModRM | BitOp,
216 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
217 DstReg | SrcMem16 | ModRM | Mov,
219 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
220 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
221 DstReg | SrcMem16 | ModRM | Mov,
223 0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
224 0, 0, 0, 0, 0, 0, 0, 0,
226 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
228 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
230 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
233 /* EFLAGS bit definitions. */
234 #define EFLG_OF (1<<11) /* Overflow Flag */
235 #define EFLG_DF (1<<10) /* Direction Flag (string ops decrement when set) */
236 #define EFLG_SF (1<<7) /* Sign Flag */
237 #define EFLG_ZF (1<<6) /* Zero Flag */
238 #define EFLG_AF (1<<4) /* Auxiliary-carry Flag */
239 #define EFLG_PF (1<<2) /* Parity Flag */
240 #define EFLG_CF (1<<0) /* Carry Flag */
243 * Instruction emulation:
244 * Most instructions are emulated directly via a fragment of inline assembly
245 * code. This allows us to save/restore EFLAGS and thus very easily pick up
246 * any modified flags.
/*
 * Per-arch spellings for the inline-asm fragments below.
 * NOTE(review): several continuation lines of _PRE_EFLAGS/_POST_EFLAGS are
 * elided in this extraction (visible lines end in '\'); code is kept
 * byte-for-byte — do not insert text between continuation lines.
 */
249 #if defined(CONFIG_X86_64)
250 #define _LO32 "k" /* force 32-bit operand */
251 #define _STK "%%rsp" /* stack pointer */
252 #elif defined(__i386__)
253 #define _LO32 "" /* force 32-bit operand */
254 #define _STK "%%esp" /* stack pointer */
258 * These EFLAGS bits are restored from saved value during emulation, and
259 * any changes are written back to the saved value after emulation.
261 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
263 /* Before executing instruction: restore necessary bits in EFLAGS. */
264 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
265 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
266 "movl %"_sav",%"_LO32 _tmp"; " \
269 "movl %"_msk",%"_LO32 _tmp"; " \
270 "andl %"_LO32 _tmp",("_STK"); " \
272 "notl %"_LO32 _tmp"; " \
273 "andl %"_LO32 _tmp",("_STK"); " \
274 "andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
276 "orl %"_LO32 _tmp",("_STK"); " \
280 /* After executing instruction: write-back necessary bits in EFLAGS. */
281 #define _POST_EFLAGS(_sav, _msk, _tmp) \
282 /* _sav |= EFLAGS & _msk; */ \
285 "andl %"_msk",%"_LO32 _tmp"; " \
286 "orl %"_LO32 _tmp",%"_sav"; "
288 /* Raw emulation: instruction has two explicit operands. */
/*
 * The __emulate_2op* macros run the real ALU instruction (_op) on the
 * operands via inline asm, switching on (_dst).bytes to pick the b/w/l/q
 * form; the _?x/_?y parameters supply the asm operand-size suffix and
 * register constraint for each width. EFLAGS is threaded through via
 * _PRE_EFLAGS/_POST_EFLAGS.
 * NOTE(review): several continuation lines are elided by this extraction.
 */
289 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
291 unsigned long _tmp; \
293 switch ((_dst).bytes) { \
295 __asm__ __volatile__ ( \
296 _PRE_EFLAGS("0", "4", "2") \
297 _op"w %"_wx"3,%1; " \
298 _POST_EFLAGS("0", "4", "2") \
299 : "=m" (_eflags), "=m" ((_dst).val), \
301 : _wy ((_src).val), "i" (EFLAGS_MASK)); \
304 __asm__ __volatile__ ( \
305 _PRE_EFLAGS("0", "4", "2") \
306 _op"l %"_lx"3,%1; " \
307 _POST_EFLAGS("0", "4", "2") \
308 : "=m" (_eflags), "=m" ((_dst).val), \
310 : _ly ((_src).val), "i" (EFLAGS_MASK)); \
313 __emulate_2op_8byte(_op, _src, _dst, \
314 _eflags, _qx, _qy); \
319 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
321 unsigned long _tmp; \
322 switch ((_dst).bytes) { \
324 __asm__ __volatile__ ( \
325 _PRE_EFLAGS("0", "4", "2") \
326 _op"b %"_bx"3,%1; " \
327 _POST_EFLAGS("0", "4", "2") \
328 : "=m" (_eflags), "=m" ((_dst).val), \
330 : _by ((_src).val), "i" (EFLAGS_MASK)); \
333 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
334 _wx, _wy, _lx, _ly, _qx, _qy); \
339 /* Source operand is byte-sized and may be restricted to just %cl. */
340 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
341 __emulate_2op(_op, _src, _dst, _eflags, \
342 "b", "c", "b", "c", "b", "c", "b", "c")
344 /* Source operand is byte, word, long or quad sized. */
345 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
346 __emulate_2op(_op, _src, _dst, _eflags, \
347 "b", "q", "w", "r", _LO32, "r", "", "r")
349 /* Source operand is word, long or quad sized. */
350 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
351 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
352 "w", "r", _LO32, "r", "", "r")
354 /* Instruction has only one explicit operand (no source operand). */
/*
 * emulate_1op: single-operand ALU op (inc/dec/neg/...), switching on operand
 * width like __emulate_2op. The 8-byte variants exist only on CONFIG_X86_64
 * and expand to nothing on i386.
 * NOTE(review): continuation lines (including the _op"b/w/l" asm lines) are
 * elided by this extraction; code below is kept byte-for-byte.
 */
355 #define emulate_1op(_op, _dst, _eflags) \
357 unsigned long _tmp; \
359 switch ((_dst).bytes) { \
361 __asm__ __volatile__ ( \
362 _PRE_EFLAGS("0", "3", "2") \
364 _POST_EFLAGS("0", "3", "2") \
365 : "=m" (_eflags), "=m" ((_dst).val), \
367 : "i" (EFLAGS_MASK)); \
370 __asm__ __volatile__ ( \
371 _PRE_EFLAGS("0", "3", "2") \
373 _POST_EFLAGS("0", "3", "2") \
374 : "=m" (_eflags), "=m" ((_dst).val), \
376 : "i" (EFLAGS_MASK)); \
379 __asm__ __volatile__ ( \
380 _PRE_EFLAGS("0", "3", "2") \
382 _POST_EFLAGS("0", "3", "2") \
383 : "=m" (_eflags), "=m" ((_dst).val), \
385 : "i" (EFLAGS_MASK)); \
388 __emulate_1op_8byte(_op, _dst, _eflags); \
393 /* Emulate an instruction with quadword operands (x86/64 only). */
394 #if defined(CONFIG_X86_64)
395 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
397 __asm__ __volatile__ ( \
398 _PRE_EFLAGS("0", "4", "2") \
399 _op"q %"_qx"3,%1; " \
400 _POST_EFLAGS("0", "4", "2") \
401 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
402 : _qy ((_src).val), "i" (EFLAGS_MASK)); \
405 #define __emulate_1op_8byte(_op, _dst, _eflags) \
407 __asm__ __volatile__ ( \
408 _PRE_EFLAGS("0", "3", "2") \
410 _POST_EFLAGS("0", "3", "2") \
411 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
412 : "i" (EFLAGS_MASK)); \
415 #elif defined(__i386__)
416 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
417 #define __emulate_1op_8byte(_op, _dst, _eflags)
418 #endif /* __i386__ */
420 /* Fetch next part of the instruction being emulated. */
/*
 * insn_fetch: statement-expression that pulls _size bytes at *(_eip) into a
 * local, advancing _eip; relies on 'rc', 'ctxt' and 'ops' being in scope at
 * every call site. address_mask/register_address truncate register values to
 * the effective address size (c->ad_bytes). NOTE(review): several
 * continuation lines (error-exit, else-arms, closing braces) are elided.
 */
421 #define insn_fetch(_type, _size, _eip) \
422 ({ unsigned long _x; \
423 rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
430 /* Access/update address held in a register, based on addressing mode. */
431 #define address_mask(reg) \
432 ((c->ad_bytes == sizeof(unsigned long)) ? \
433 (reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
434 #define register_address(base, reg) \
435 ((base) + address_mask(reg))
436 #define register_address_increment(reg, inc) \
438 /* signed type ensures sign extension to long */ \
440 if (c->ad_bytes == sizeof(unsigned long)) \
444 ~((1UL << (c->ad_bytes << 3)) - 1)) | \
446 ((1UL << (c->ad_bytes << 3)) - 1)); \
449 #define JMP_REL(rel) \
451 register_address_increment(c->eip, rel); \
/*
 * Fetch one guest instruction byte at linear address 'linear' into *dest,
 * using the per-decode fetch cache; on a cache miss, refills up to 15 bytes
 * (max x86 instruction length) clipped at the page boundary via read_std.
 * NOTE(review): opening brace, local declarations, error path and return are
 * on lines elided by this extraction.
 */
454 static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
455 struct x86_emulate_ops *ops,
456 unsigned long linear, u8 *dest)
458 struct fetch_cache *fc = &ctxt->decode.fetch;
462 if (linear < fc->start || linear >= fc->end) {
463 size = min(15UL, PAGE_SIZE - offset_in_page(linear));
464 rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
468 fc->end = linear + size;
470 *dest = fc->data[linear - fc->start];
/*
 * Fetch 'size' instruction bytes starting at the CS-relative 'eip' into
 * dest, byte-by-byte through do_fetch_insn_byte (which caches).
 * NOTE(review): the enclosing loop, error check and return are on elided
 * lines.
 */
474 static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
475 struct x86_emulate_ops *ops,
476 unsigned long eip, void *dest, unsigned size)
480 eip += ctxt->cs_base;
482 rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
490 * Given the 'reg' portion of a ModRM byte, and a register block, return a
491 * pointer into the block that addresses the relevant register.
492 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
494 static void *decode_register(u8 modrm_reg, unsigned long *regs,
499 p = ®s[modrm_reg];
500 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
501 p = (unsigned char *)®s[modrm_reg & 3] + 1;
/*
 * Read a pseudo-descriptor (limit:base pair, as used by lgdt/lidt) from
 * guest memory: 2 bytes of limit into *size, then op_bytes of base into
 * *address. NOTE(review): parameter 'ptr', braces, error checks and return
 * are on elided lines.
 */
505 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
506 struct x86_emulate_ops *ops,
508 u16 *size, unsigned long *address, int op_bytes)
515 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
519 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
/*
 * Evaluate an x86 condition code (low nibble of 'condition', e.g. from a
 * Jcc opcode) against the given EFLAGS value; returns nonzero if the
 * condition holds. Conditions come in even/odd pairs (o/no, b/nb, ...), so
 * only the even member is tested and the lsb inverts the result.
 * NOTE(review): several case labels and the 'int rc = 0;' declaration are on
 * elided lines.
 */
524 static int test_cc(unsigned int condition, unsigned int flags)
528 switch ((condition & 15) >> 1) {
530 rc |= (flags & EFLG_OF);
532 case 1: /* b/c/nae */
533 rc |= (flags & EFLG_CF);
536 rc |= (flags & EFLG_ZF);
539 rc |= (flags & (EFLG_CF|EFLG_ZF));
542 rc |= (flags & EFLG_SF);
545 rc |= (flags & EFLG_PF);
548 rc |= (flags & EFLG_ZF);
551 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
555 /* Odd condition identifiers (lsb == 1) have inverted sense. */
556 return (!!rc ^ (condition & 1));
/*
 * Fill in 'op' as a register operand: resolve the register number (from
 * modrm_reg, or from the opcode low bits + REX.B for the one-byte
 * inc/dec/mov-imm forms), point op->ptr at it, and load op->val at the
 * correct width. With no REX prefix, byte operands may address AH/CH/DH/BH.
 * 'inhibit_bytereg' forces word/long decode even for ByteOp opcodes
 * (movzx/movsx). NOTE(review): braces, case labels and some control lines
 * are elided in this extraction.
 */
559 static void decode_register_operand(struct operand *op,
560 struct decode_cache *c,
563 unsigned reg = c->modrm_reg;
564 int highbyte_regs = c->rex_prefix == 0;
567 reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
569 if ((c->d & ByteOp) && !inhibit_bytereg) {
570 op->ptr = decode_register(reg, c->regs, highbyte_regs);
571 op->val = *(u8 *)op->ptr;
574 op->ptr = decode_register(reg, c->regs, 0);
575 op->bytes = c->op_bytes;
578 op->val = *(u16 *)op->ptr;
581 op->val = *(u32 *)op->ptr;
584 op->val = *(u64 *) op->ptr;
588 op->orig_val = op->val;
/*
 * Decode the ModRM byte (and SIB byte / displacement if present) into
 * c->modrm_mod/reg/rm and the effective address c->modrm_ea. Handles the
 * 16-bit and 32/64-bit addressing forms, REX extensions, RIP-relative
 * addressing in long mode, and the implicit SS-segment default for
 * BP-based 16-bit forms. NOTE(review): many case labels, braces and
 * fall-through lines are elided by this extraction; code is kept
 * byte-for-byte.
 */
591 static int decode_modrm(struct x86_emulate_ctxt *ctxt,
592 struct x86_emulate_ops *ops)
594 struct decode_cache *c = &ctxt->decode;
596 int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
600 c->modrm_reg = (c->rex_prefix & 4) << 1; /* REX.R */
601 index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
602 c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
605 c->modrm = insn_fetch(u8, 1, c->eip);
606 c->modrm_mod |= (c->modrm & 0xc0) >> 6;
607 c->modrm_reg |= (c->modrm & 0x38) >> 3;
608 c->modrm_rm |= (c->modrm & 0x07);
612 if (c->modrm_mod == 3) {
613 c->modrm_val = *(unsigned long *)
614 decode_register(c->modrm_rm, c->regs, c->d & ByteOp);
618 if (c->ad_bytes == 2) {
619 unsigned bx = c->regs[VCPU_REGS_RBX];
620 unsigned bp = c->regs[VCPU_REGS_RBP];
621 unsigned si = c->regs[VCPU_REGS_RSI];
622 unsigned di = c->regs[VCPU_REGS_RDI];
624 /* 16-bit ModR/M decode. */
625 switch (c->modrm_mod) {
627 if (c->modrm_rm == 6)
628 c->modrm_ea += insn_fetch(u16, 2, c->eip);
631 c->modrm_ea += insn_fetch(s8, 1, c->eip);
634 c->modrm_ea += insn_fetch(u16, 2, c->eip);
637 switch (c->modrm_rm) {
639 c->modrm_ea += bx + si;
642 c->modrm_ea += bx + di;
645 c->modrm_ea += bp + si;
648 c->modrm_ea += bp + di;
657 if (c->modrm_mod != 0)
664 if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
665 (c->modrm_rm == 6 && c->modrm_mod != 0))
666 if (!c->override_base)
667 c->override_base = &ctxt->ss_base;
668 c->modrm_ea = (u16)c->modrm_ea;
670 /* 32/64-bit ModR/M decode. */
671 switch (c->modrm_rm) {
674 sib = insn_fetch(u8, 1, c->eip);
675 index_reg |= (sib >> 3) & 7;
681 if (c->modrm_mod != 0)
682 c->modrm_ea += c->regs[base_reg];
685 insn_fetch(s32, 4, c->eip);
688 c->modrm_ea += c->regs[base_reg];
694 c->modrm_ea += c->regs[index_reg] << scale;
698 if (c->modrm_mod != 0)
699 c->modrm_ea += c->regs[c->modrm_rm];
700 else if (ctxt->mode == X86EMUL_MODE_PROT64)
704 c->modrm_ea += c->regs[c->modrm_rm];
707 switch (c->modrm_mod) {
709 if (c->modrm_rm == 5)
710 c->modrm_ea += insn_fetch(s32, 4, c->eip);
713 c->modrm_ea += insn_fetch(s8, 1, c->eip);
716 c->modrm_ea += insn_fetch(s32, 4, c->eip);
/* RIP-relative: bias the EA by the instruction's length, per SrcMask. */
721 c->modrm_ea += c->eip;
722 switch (c->d & SrcMask) {
730 if (c->op_bytes == 8)
733 c->modrm_ea += c->op_bytes;
/*
 * Decode a moffs-style absolute displacement (MemAbs opcodes 0xa0-0xa3):
 * fetch an address-sized (2/4/8-byte) immediate straight into c->modrm_ea.
 * NOTE(review): braces, case labels and the 'done:' return are on elided
 * lines.
 */
740 static int decode_abs(struct x86_emulate_ctxt *ctxt,
741 struct x86_emulate_ops *ops)
743 struct decode_cache *c = &ctxt->decode;
746 switch (c->ad_bytes) {
748 c->modrm_ea = insn_fetch(u16, 2, c->eip);
751 c->modrm_ea = insn_fetch(u32, 4, c->eip);
754 c->modrm_ea = insn_fetch(u64, 8, c->eip);
/*
 * Decode the instruction at the guest's current RIP into ctxt->decode:
 * establish default operand/address sizes from the CPU mode, consume legacy
 * and REX prefixes, look up the opcode's decode flags, run ModRM/SIB or
 * absolute-displacement decode, apply segment overrides, then materialize
 * the source and destination operands per the flag bits. Returns 0 on
 * success, -1 if the opcode cannot be emulated.
 * NOTE(review): this extraction elides the return type / 'int' line, many
 * case labels, braces, 'break's and the prefix-loop structure; code below is
 * kept byte-for-byte.
 */
762 x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
764 struct decode_cache *c = &ctxt->decode;
766 int mode = ctxt->mode;
767 int def_op_bytes, def_ad_bytes;
769 /* Shadow copy of register state. Committed on successful emulation. */
771 memset(c, 0, sizeof(struct decode_cache))
772 c->eip = ctxt->vcpu->arch.rip;
773 memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
776 case X86EMUL_MODE_REAL:
777 case X86EMUL_MODE_PROT16:
778 def_op_bytes = def_ad_bytes = 2;
780 case X86EMUL_MODE_PROT32:
781 def_op_bytes = def_ad_bytes = 4;
784 case X86EMUL_MODE_PROT64:
793 c->op_bytes = def_op_bytes;
794 c->ad_bytes = def_ad_bytes;
796 /* Legacy prefixes. */
798 switch (c->b = insn_fetch(u8, 1, c->eip)) {
799 case 0x66: /* operand-size override */
800 /* switch between 2/4 bytes */
801 c->op_bytes = def_op_bytes ^ 6;
803 case 0x67: /* address-size override */
804 if (mode == X86EMUL_MODE_PROT64)
805 /* switch between 4/8 bytes */
806 c->ad_bytes = def_ad_bytes ^ 12;
808 /* switch between 2/4 bytes */
809 c->ad_bytes = def_ad_bytes ^ 6;
811 case 0x2e: /* CS override */
812 c->override_base = &ctxt->cs_base;
814 case 0x3e: /* DS override */
815 c->override_base = &ctxt->ds_base;
817 case 0x26: /* ES override */
818 c->override_base = &ctxt->es_base;
820 case 0x64: /* FS override */
821 c->override_base = &ctxt->fs_base;
823 case 0x65: /* GS override */
824 c->override_base = &ctxt->gs_base;
826 case 0x36: /* SS override */
827 c->override_base = &ctxt->ss_base;
829 case 0x40 ... 0x4f: /* REX */
830 if (mode != X86EMUL_MODE_PROT64)
832 c->rex_prefix = c->b;
834 case 0xf0: /* LOCK */
837 case 0xf2: /* REPNE/REPNZ */
838 c->rep_prefix = REPNE_PREFIX;
840 case 0xf3: /* REP/REPE/REPZ */
841 c->rep_prefix = REPE_PREFIX;
847 /* Any legacy prefix after a REX prefix nullifies its effect. */
856 if (c->rex_prefix & 8)
857 c->op_bytes = 8; /* REX.W */
859 /* Opcode byte(s). */
860 c->d = opcode_table[c->b];
862 /* Two-byte opcode? */
865 c->b = insn_fetch(u8, 1, c->eip);
866 c->d = twobyte_table[c->b];
871 DPRINTF("Cannot emulate %02x\n", c->b);
876 if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
879 /* ModRM and SIB bytes. */
881 rc = decode_modrm(ctxt, ops);
882 else if (c->d & MemAbs)
883 rc = decode_abs(ctxt, ops);
887 if (!c->override_base)
888 c->override_base = &ctxt->ds_base;
889 if (mode == X86EMUL_MODE_PROT64 &&
890 c->override_base != &ctxt->fs_base &&
891 c->override_base != &ctxt->gs_base)
892 c->override_base = NULL;
894 if (c->override_base)
895 c->modrm_ea += *c->override_base;
897 if (c->ad_bytes != 8)
898 c->modrm_ea = (u32)c->modrm_ea;
900 * Decode and fetch the source operand: register, memory
903 switch (c->d & SrcMask) {
907 decode_register_operand(&c->src, c, 0);
916 c->src.bytes = (c->d & ByteOp) ? 1 :
918 /* Don't fetch the address for invlpg: it could be unmapped. */
919 if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
923 * For instructions with a ModR/M byte, switch to register
926 if ((c->d & ModRM) && c->modrm_mod == 3) {
927 c->src.type = OP_REG;
930 c->src.type = OP_MEM;
933 c->src.type = OP_IMM;
934 c->src.ptr = (unsigned long *)c->eip;
935 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
936 if (c->src.bytes == 8)
938 /* NB. Immediates are sign-extended as necessary. */
939 switch (c->src.bytes) {
941 c->src.val = insn_fetch(s8, 1, c->eip);
944 c->src.val = insn_fetch(s16, 2, c->eip);
947 c->src.val = insn_fetch(s32, 4, c->eip);
952 c->src.type = OP_IMM;
953 c->src.ptr = (unsigned long *)c->eip;
955 c->src.val = insn_fetch(s8, 1, c->eip);
959 /* Decode and fetch the destination operand: register or memory. */
960 switch (c->d & DstMask) {
962 /* Special instructions do their own operand decoding. */
965 decode_register_operand(&c->dst, c,
966 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
969 if ((c->d & ModRM) && c->modrm_mod == 3) {
970 c->dst.type = OP_REG;
973 c->dst.type = OP_MEM;
978 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
/*
 * Set up a push of c->src.val: pre-decrement RSP by the operand size and
 * point the destination operand at the new SS:RSP so the common writeback
 * path performs the store. NOTE(review): braces are on elided lines.
 */
981 static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
983 struct decode_cache *c = &ctxt->decode;
985 c->dst.type = OP_MEM;
986 c->dst.bytes = c->op_bytes;
987 c->dst.val = c->src.val;
988 register_address_increment(c->regs[VCPU_REGS_RSP], -c->op_bytes);
989 c->dst.ptr = (void *) register_address(ctxt->ss_base,
990 c->regs[VCPU_REGS_RSP]);
/*
 * Grp1a (opcode 0x8f): pop into r/m. Reads the top-of-stack value from
 * SS:RSP into c->dst.val (writeback stores it) and post-increments RSP.
 * NOTE(review): braces, error check and return are on elided lines.
 */
993 static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
994 struct x86_emulate_ops *ops)
996 struct decode_cache *c = &ctxt->decode;
999 rc = ops->read_std(register_address(ctxt->ss_base,
1000 c->regs[VCPU_REGS_RSP]),
1001 &c->dst.val, c->dst.bytes, ctxt->vcpu);
1005 register_address_increment(c->regs[VCPU_REGS_RSP], c->dst.bytes);
/*
 * Grp2 (opcodes 0xc0-0xc1/0xd0-0xd3): rotate/shift family selected by
 * modrm_reg; each arm runs the real instruction with a byte-sized (CL)
 * shift count via emulate_2op_SrcB. NOTE(review): some case labels and
 * 'break's are on elided lines.
 */
1010 static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
1012 struct decode_cache *c = &ctxt->decode;
1013 switch (c->modrm_reg) {
1015 emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
1018 emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
1021 emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
1024 emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
1026 case 4: /* sal/shl */
1027 case 6: /* sal/shl */
1028 emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
1031 emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
1034 emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
/*
 * Grp3 (opcodes 0xf6-0xf7): test/not/neg selected by modrm_reg. 'test'
 * is special-cased because, unlike the generic decode, it carries an
 * immediate source that must be fetched here. Unhandled group members
 * (mul/imul/div/idiv) fall through to the cannot-emulate default.
 * NOTE(review): case labels, braces and 'break's are on elided lines.
 */
1039 static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
1040 struct x86_emulate_ops *ops)
1042 struct decode_cache *c = &ctxt->decode;
1045 switch (c->modrm_reg) {
1046 case 0 ... 1: /* test */
1048 * Special case in Grp3: test has an immediate
1051 c->src.type = OP_IMM;
1052 c->src.ptr = (unsigned long *)c->eip;
1053 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1054 if (c->src.bytes == 8)
1056 switch (c->src.bytes) {
1058 c->src.val = insn_fetch(s8, 1, c->eip);
1061 c->src.val = insn_fetch(s16, 2, c->eip);
1064 c->src.val = insn_fetch(s32, 4, c->eip);
1067 emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
1070 c->dst.val = ~c->dst.val;
1073 emulate_1op("neg", c->dst, ctxt->eflags);
1076 DPRINTF("Cannot emulate %02x\n", c->b);
1077 rc = X86EMUL_UNHANDLEABLE;
/*
 * Grp4/Grp5 (opcodes 0xfe-0xff): inc/dec/jmp/push selected by modrm_reg.
 * The push arm forces an 8-byte operand in 64-bit mode (PUSH always pushes
 * a 64-bit quantity there), pre-decrements RSP, and writes via
 * write_emulated; writeback is disabled since the store happened here.
 * NOTE(review): case labels, braces and error paths are on elided lines.
 */
1084 static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
1085 struct x86_emulate_ops *ops)
1087 struct decode_cache *c = &ctxt->decode;
1090 switch (c->modrm_reg) {
1092 emulate_1op("inc", c->dst, ctxt->eflags);
1095 emulate_1op("dec", c->dst, ctxt->eflags);
1097 case 4: /* jmp abs */
1099 c->eip = c->dst.val;
1101 DPRINTF("Cannot emulate %02x\n", c->b);
1102 return X86EMUL_UNHANDLEABLE;
1107 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1109 if (ctxt->mode == X86EMUL_MODE_PROT64) {
1111 rc = ops->read_std((unsigned long)c->dst.ptr,
1112 &c->dst.val, 8, ctxt->vcpu);
1116 register_address_increment(c->regs[VCPU_REGS_RSP],
1118 rc = ops->write_emulated(register_address(ctxt->ss_base,
1119 c->regs[VCPU_REGS_RSP]), &c->dst.val,
1120 c->dst.bytes, ctxt->vcpu);
1123 c->dst.type = OP_NONE;
1126 DPRINTF("Cannot emulate %02x\n", c->b);
1127 return X86EMUL_UNHANDLEABLE;
/*
 * Grp9 (0x0f 0xc7): cmpxchg8b. Reads the 64-bit operand at 'memop' and
 * compares it against EDX:EAX; on mismatch, loads EDX:EAX from memory and
 * clears ZF; on match, stores ECX:EBX via cmpxchg_emulated and sets ZF.
 * NOTE(review): braces, error checks, else-arm and return are on elided
 * lines.
 */
1132 static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
1133 struct x86_emulate_ops *ops,
1134 unsigned long memop)
1136 struct decode_cache *c = &ctxt->decode;
1140 rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
1144 if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
1145 ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
1147 c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1148 c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1149 ctxt->eflags &= ~EFLG_ZF;
1152 new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
1153 (u32) c->regs[VCPU_REGS_RBX];
1155 rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
1158 ctxt->eflags |= EFLG_ZF;
/*
 * Commit the decoded instruction's destination operand: for OP_REG, store
 * c->dst.val into the register at the operand width (the 4-byte store
 * deliberately writes the full long to get 64-bit zero-extension); for
 * OP_MEM, store via cmpxchg_emulated (locked forms) or write_emulated.
 * NOTE(review): case labels, the lock-prefix test, argument lists and
 * return are on elided lines.
 */
1163 static inline int writeback(struct x86_emulate_ctxt *ctxt,
1164 struct x86_emulate_ops *ops)
1167 struct decode_cache *c = &ctxt->decode;
1169 switch (c->dst.type) {
1171 /* The 4-byte case *is* correct:
1172 * in 64-bit mode we zero-extend.
1174 switch (c->dst.bytes) {
1176 *(u8 *)c->dst.ptr = (u8)c->dst.val;
1179 *(u16 *)c->dst.ptr = (u16)c->dst.val;
1182 *c->dst.ptr = (u32)c->dst.val;
1183 break; /* 64b: zero-ext */
1185 *c->dst.ptr = c->dst.val;
1191 rc = ops->cmpxchg_emulated(
1192 (unsigned long)c->dst.ptr,
1198 rc = ops->write_emulated(
1199 (unsigned long)c->dst.ptr,
1216 x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
1218 unsigned long memop = 0;
1220 unsigned long saved_eip = 0;
1221 struct decode_cache *c = &ctxt->decode;
1224 /* Shadow copy of register state. Committed on successful emulation.
1225 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
1229 memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
1232 if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
1233 memop = c->modrm_ea;
1235 if (c->rep_prefix && (c->d & String)) {
1236 /* All REP prefixes have the same first termination condition */
1237 if (c->regs[VCPU_REGS_RCX] == 0) {
1238 ctxt->vcpu->arch.rip = c->eip;
1241 /* The second termination condition only applies for REPE
1242 * and REPNE. Test if the repeat string operation prefix is
1243 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
1244 * corresponding termination condition according to:
1245 * - if REPE/REPZ and ZF = 0 then done
1246 * - if REPNE/REPNZ and ZF = 1 then done
1248 if ((c->b == 0xa6) || (c->b == 0xa7) ||
1249 (c->b == 0xae) || (c->b == 0xaf)) {
1250 if ((c->rep_prefix == REPE_PREFIX) &&
1251 ((ctxt->eflags & EFLG_ZF) == 0)) {
1252 ctxt->vcpu->arch.rip = c->eip;
1255 if ((c->rep_prefix == REPNE_PREFIX) &&
1256 ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
1257 ctxt->vcpu->arch.rip = c->eip;
1261 c->regs[VCPU_REGS_RCX]--;
1262 c->eip = ctxt->vcpu->arch.rip;
1265 if (c->src.type == OP_MEM) {
1266 c->src.ptr = (unsigned long *)memop;
1268 rc = ops->read_emulated((unsigned long)c->src.ptr,
1274 c->src.orig_val = c->src.val;
1277 if ((c->d & DstMask) == ImplicitOps)
1281 if (c->dst.type == OP_MEM) {
1282 c->dst.ptr = (unsigned long *)memop;
1283 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1286 unsigned long mask = ~(c->dst.bytes * 8 - 1);
1288 c->dst.ptr = (void *)c->dst.ptr +
1289 (c->src.val & mask) / 8;
1291 if (!(c->d & Mov) &&
1292 /* optimisation - avoid slow emulated read */
1293 ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
1295 c->dst.bytes, ctxt->vcpu)) != 0))
1298 c->dst.orig_val = c->dst.val;
1308 emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1312 emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1316 emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1320 emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1324 emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1326 case 0x24: /* and al imm8 */
1327 c->dst.type = OP_REG;
1328 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1329 c->dst.val = *(u8 *)c->dst.ptr;
1331 c->dst.orig_val = c->dst.val;
1333 case 0x25: /* and ax imm16, or eax imm32 */
1334 c->dst.type = OP_REG;
1335 c->dst.bytes = c->op_bytes;
1336 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1337 if (c->op_bytes == 2)
1338 c->dst.val = *(u16 *)c->dst.ptr;
1340 c->dst.val = *(u32 *)c->dst.ptr;
1341 c->dst.orig_val = c->dst.val;
1345 emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1349 emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1353 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1355 case 0x40 ... 0x47: /* inc r16/r32 */
1356 emulate_1op("inc", c->dst, ctxt->eflags);
1358 case 0x48 ... 0x4f: /* dec r16/r32 */
1359 emulate_1op("dec", c->dst, ctxt->eflags);
1361 case 0x50 ... 0x57: /* push reg */
1362 c->dst.type = OP_MEM;
1363 c->dst.bytes = c->op_bytes;
1364 c->dst.val = c->src.val;
1365 register_address_increment(c->regs[VCPU_REGS_RSP],
1367 c->dst.ptr = (void *) register_address(
1368 ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
1370 case 0x58 ... 0x5f: /* pop reg */
1372 if ((rc = ops->read_std(register_address(ctxt->ss_base,
1373 c->regs[VCPU_REGS_RSP]), c->dst.ptr,
1374 c->op_bytes, ctxt->vcpu)) != 0)
1377 register_address_increment(c->regs[VCPU_REGS_RSP],
1379 c->dst.type = OP_NONE; /* Disable writeback. */
1381 case 0x63: /* movsxd */
1382 if (ctxt->mode != X86EMUL_MODE_PROT64)
1383 goto cannot_emulate;
1384 c->dst.val = (s32) c->src.val;
1386 case 0x6a: /* push imm8 */
1388 c->src.val = insn_fetch(s8, 1, c->eip);
1391 case 0x6c: /* insb */
1392 case 0x6d: /* insw/insd */
1393 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1395 (c->d & ByteOp) ? 1 : c->op_bytes,
1397 address_mask(c->regs[VCPU_REGS_RCX]) : 1,
1398 (ctxt->eflags & EFLG_DF),
1399 register_address(ctxt->es_base,
1400 c->regs[VCPU_REGS_RDI]),
1402 c->regs[VCPU_REGS_RDX]) == 0) {
1407 case 0x6e: /* outsb */
1408 case 0x6f: /* outsw/outsd */
1409 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1411 (c->d & ByteOp) ? 1 : c->op_bytes,
1413 address_mask(c->regs[VCPU_REGS_RCX]) : 1,
1414 (ctxt->eflags & EFLG_DF),
1415 register_address(c->override_base ?
1418 c->regs[VCPU_REGS_RSI]),
1420 c->regs[VCPU_REGS_RDX]) == 0) {
1425 case 0x70 ... 0x7f: /* jcc (short) */ {
1426 int rel = insn_fetch(s8, 1, c->eip);
1428 if (test_cc(c->b, ctxt->eflags))
1432 case 0x80 ... 0x83: /* Grp1 */
1433 switch (c->modrm_reg) {
1453 emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
1455 case 0x86 ... 0x87: /* xchg */
1456 /* Write back the register source. */
1457 switch (c->dst.bytes) {
1459 *(u8 *) c->src.ptr = (u8) c->dst.val;
1462 *(u16 *) c->src.ptr = (u16) c->dst.val;
1465 *c->src.ptr = (u32) c->dst.val;
1466 break; /* 64b reg: zero-extend */
1468 *c->src.ptr = c->dst.val;
1472 * Write back the memory destination with implicit LOCK
1475 c->dst.val = c->src.val;
1478 case 0x88 ... 0x8b: /* mov */
1480 case 0x8d: /* lea r16/r32, m */
1481 c->dst.val = c->modrm_val;
1483 case 0x8f: /* pop (sole member of Grp1a) */
1484 rc = emulate_grp1a(ctxt, ops);
1488 case 0x9c: /* pushf */
1489 c->src.val = (unsigned long) ctxt->eflags;
1492 case 0x9d: /* popf */
1493 c->dst.ptr = (unsigned long *) &ctxt->eflags;
1494 goto pop_instruction;
1495 case 0xa0 ... 0xa1: /* mov */
1496 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1497 c->dst.val = c->src.val;
1499 case 0xa2 ... 0xa3: /* mov */
1500 c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
1502 case 0xa4 ... 0xa5: /* movs */
1503 c->dst.type = OP_MEM;
1504 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1505 c->dst.ptr = (unsigned long *)register_address(
1507 c->regs[VCPU_REGS_RDI]);
1508 if ((rc = ops->read_emulated(register_address(
1509 c->override_base ? *c->override_base :
1511 c->regs[VCPU_REGS_RSI]),
1513 c->dst.bytes, ctxt->vcpu)) != 0)
1515 register_address_increment(c->regs[VCPU_REGS_RSI],
1516 (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1518 register_address_increment(c->regs[VCPU_REGS_RDI],
1519 (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1522 case 0xa6 ... 0xa7: /* cmps */
1523 c->src.type = OP_NONE; /* Disable writeback. */
1524 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1525 c->src.ptr = (unsigned long *)register_address(
1526 c->override_base ? *c->override_base :
1528 c->regs[VCPU_REGS_RSI]);
1529 if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
1535 c->dst.type = OP_NONE; /* Disable writeback. */
1536 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1537 c->dst.ptr = (unsigned long *)register_address(
1539 c->regs[VCPU_REGS_RDI]);
1540 if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
1546 DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);
1548 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1550 register_address_increment(c->regs[VCPU_REGS_RSI],
1551 (ctxt->eflags & EFLG_DF) ? -c->src.bytes
1553 register_address_increment(c->regs[VCPU_REGS_RDI],
1554 (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1558 case 0xaa ... 0xab: /* stos */
1559 c->dst.type = OP_MEM;
1560 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1561 c->dst.ptr = (unsigned long *)register_address(
1563 c->regs[VCPU_REGS_RDI]);
1564 c->dst.val = c->regs[VCPU_REGS_RAX];
1565 register_address_increment(c->regs[VCPU_REGS_RDI],
1566 (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1569 case 0xac ... 0xad: /* lods */
1570 c->dst.type = OP_REG;
1571 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1572 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1573 if ((rc = ops->read_emulated(register_address(
1574 c->override_base ? *c->override_base :
1576 c->regs[VCPU_REGS_RSI]),
1581 register_address_increment(c->regs[VCPU_REGS_RSI],
1582 (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1585 case 0xae ... 0xaf: /* scas */
1586 DPRINTF("Urk! I don't handle SCAS.\n");
1587 goto cannot_emulate;
1591 case 0xc3: /* ret */
1592 c->dst.ptr = &c->eip;
1593 goto pop_instruction;
1594 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1596 c->dst.val = c->src.val;
1598 case 0xd0 ... 0xd1: /* Grp2 */
1602 case 0xd2 ... 0xd3: /* Grp2 */
1603 c->src.val = c->regs[VCPU_REGS_RCX];
1606 case 0xe8: /* call (near) */ {
1608 switch (c->op_bytes) {
1610 rel = insn_fetch(s16, 2, c->eip);
1613 rel = insn_fetch(s32, 4, c->eip);
1616 DPRINTF("Call: Invalid op_bytes\n");
1617 goto cannot_emulate;
1619 c->src.val = (unsigned long) c->eip;
1621 c->op_bytes = c->ad_bytes;
1625 case 0xe9: /* jmp rel */
1626 case 0xeb: /* jmp rel short */
1627 JMP_REL(c->src.val);
1628 c->dst.type = OP_NONE; /* Disable writeback. */
1630 case 0xf4: /* hlt */
1631 ctxt->vcpu->arch.halt_request = 1;
1633 case 0xf5: /* cmc */
1634 /* complement carry flag from eflags reg */
1635 ctxt->eflags ^= EFLG_CF;
1636 c->dst.type = OP_NONE; /* Disable writeback. */
1638 case 0xf6 ... 0xf7: /* Grp3 */
1639 rc = emulate_grp3(ctxt, ops);
1643 case 0xf8: /* clc */
1644 ctxt->eflags &= ~EFLG_CF;
1645 c->dst.type = OP_NONE; /* Disable writeback. */
1647 case 0xfa: /* cli */
1648 ctxt->eflags &= ~X86_EFLAGS_IF;
1649 c->dst.type = OP_NONE; /* Disable writeback. */
1651 case 0xfb: /* sti */
1652 ctxt->eflags |= X86_EFLAGS_IF;
1653 c->dst.type = OP_NONE; /* Disable writeback. */
1655 case 0xfe ... 0xff: /* Grp4/Grp5 */
1656 rc = emulate_grp45(ctxt, ops);
1663 rc = writeback(ctxt, ops);
1667 /* Commit shadow register state. */
1668 memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
1669 ctxt->vcpu->arch.rip = c->eip;
1672 if (rc == X86EMUL_UNHANDLEABLE) {
1680 case 0x01: /* lgdt, lidt, lmsw */
1681 switch (c->modrm_reg) {
1683 unsigned long address;
1685 case 0: /* vmcall */
1686 if (c->modrm_mod != 3 || c->modrm_rm != 1)
1687 goto cannot_emulate;
1689 rc = kvm_fix_hypercall(ctxt->vcpu);
1693 kvm_emulate_hypercall(ctxt->vcpu);
1696 rc = read_descriptor(ctxt, ops, c->src.ptr,
1697 &size, &address, c->op_bytes);
1700 realmode_lgdt(ctxt->vcpu, size, address);
1702 case 3: /* lidt/vmmcall */
1703 if (c->modrm_mod == 3 && c->modrm_rm == 1) {
1704 rc = kvm_fix_hypercall(ctxt->vcpu);
1707 kvm_emulate_hypercall(ctxt->vcpu);
1709 rc = read_descriptor(ctxt, ops, c->src.ptr,
1714 realmode_lidt(ctxt->vcpu, size, address);
1718 if (c->modrm_mod != 3)
1719 goto cannot_emulate;
1720 *(u16 *)&c->regs[c->modrm_rm]
1721 = realmode_get_cr(ctxt->vcpu, 0);
1724 if (c->modrm_mod != 3)
1725 goto cannot_emulate;
1726 realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val,
1730 emulate_invlpg(ctxt->vcpu, memop);
1733 goto cannot_emulate;
1735 /* Disable writeback. */
1736 c->dst.type = OP_NONE;
1739 emulate_clts(ctxt->vcpu);
1740 c->dst.type = OP_NONE;
1742 case 0x08: /* invd */
1743 case 0x09: /* wbinvd */
1744 case 0x0d: /* GrpP (prefetch) */
1745 case 0x18: /* Grp16 (prefetch/nop) */
1746 c->dst.type = OP_NONE;
1748 case 0x20: /* mov cr, reg */
1749 if (c->modrm_mod != 3)
1750 goto cannot_emulate;
1751 c->regs[c->modrm_rm] =
1752 realmode_get_cr(ctxt->vcpu, c->modrm_reg);
1753 c->dst.type = OP_NONE; /* no writeback */
1755 case 0x21: /* mov from dr to reg */
1756 if (c->modrm_mod != 3)
1757 goto cannot_emulate;
1758 rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
1760 goto cannot_emulate;
1761 c->dst.type = OP_NONE; /* no writeback */
1763 case 0x22: /* mov reg, cr */
1764 if (c->modrm_mod != 3)
1765 goto cannot_emulate;
1766 realmode_set_cr(ctxt->vcpu,
1767 c->modrm_reg, c->modrm_val, &ctxt->eflags);
1768 c->dst.type = OP_NONE;
1770 case 0x23: /* mov from reg to dr */
1771 if (c->modrm_mod != 3)
1772 goto cannot_emulate;
1773 rc = emulator_set_dr(ctxt, c->modrm_reg,
1774 c->regs[c->modrm_rm]);
1776 goto cannot_emulate;
1777 c->dst.type = OP_NONE; /* no writeback */
1781 msr_data = (u32)c->regs[VCPU_REGS_RAX]
1782 | ((u64)c->regs[VCPU_REGS_RDX] << 32);
1783 rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
1785 kvm_inject_gp(ctxt->vcpu, 0);
1786 c->eip = ctxt->vcpu->arch.rip;
1788 rc = X86EMUL_CONTINUE;
1789 c->dst.type = OP_NONE;
1793 rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
1795 kvm_inject_gp(ctxt->vcpu, 0);
1796 c->eip = ctxt->vcpu->arch.rip;
1798 c->regs[VCPU_REGS_RAX] = (u32)msr_data;
1799 c->regs[VCPU_REGS_RDX] = msr_data >> 32;
1801 rc = X86EMUL_CONTINUE;
1802 c->dst.type = OP_NONE;
1804 case 0x40 ... 0x4f: /* cmov */
1805 c->dst.val = c->dst.orig_val = c->src.val;
1806 if (!test_cc(c->b, ctxt->eflags))
1807 c->dst.type = OP_NONE; /* no writeback */
1809 case 0x80 ... 0x8f: /* jnz rel, etc*/ {
1812 switch (c->op_bytes) {
1814 rel = insn_fetch(s16, 2, c->eip);
1817 rel = insn_fetch(s32, 4, c->eip);
1820 rel = insn_fetch(s64, 8, c->eip);
1823 DPRINTF("jnz: Invalid op_bytes\n");
1824 goto cannot_emulate;
1826 if (test_cc(c->b, ctxt->eflags))
1828 c->dst.type = OP_NONE;
1833 c->dst.type = OP_NONE;
1834 /* only subword offset */
1835 c->src.val &= (c->dst.bytes << 3) - 1;
1836 emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
1840 /* only subword offset */
1841 c->src.val &= (c->dst.bytes << 3) - 1;
1842 emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
1844 case 0xb0 ... 0xb1: /* cmpxchg */
1846 * Save real source value, then compare EAX against
1849 c->src.orig_val = c->src.val;
1850 c->src.val = c->regs[VCPU_REGS_RAX];
1851 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1852 if (ctxt->eflags & EFLG_ZF) {
1853 /* Success: write back to memory. */
1854 c->dst.val = c->src.orig_val;
1856 /* Failure: write the value we saw to EAX. */
1857 c->dst.type = OP_REG;
1858 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1863 /* only subword offset */
1864 c->src.val &= (c->dst.bytes << 3) - 1;
1865 emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
1867 case 0xb6 ... 0xb7: /* movzx */
1868 c->dst.bytes = c->op_bytes;
1869 c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
1872 case 0xba: /* Grp8 */
1873 switch (c->modrm_reg & 3) {
1886 /* only subword offset */
1887 c->src.val &= (c->dst.bytes << 3) - 1;
1888 emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
1890 case 0xbe ... 0xbf: /* movsx */
1891 c->dst.bytes = c->op_bytes;
1892 c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
1895 case 0xc3: /* movnti */
1896 c->dst.bytes = c->op_bytes;
1897 c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
1900 case 0xc7: /* Grp9 (cmpxchg8b) */
1901 rc = emulate_grp9(ctxt, ops, memop);
1904 c->dst.type = OP_NONE;
1910 DPRINTF("Cannot emulate %02x\n", c->b);