KVM: Call x86_decode_insn() only when needed
[deliverable/linux.git] / drivers / kvm / x86_emulate.c
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 *
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
10 *
11 * Copyright (C) 2006 Qumranet
12 *
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
15 *
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
18 *
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
20 */
21
22 #ifndef __KERNEL__
23 #include <stdio.h>
24 #include <stdint.h>
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf( _f , ## _a )
27 #else
28 #include "kvm.h"
29 #define DPRINTF(x...) do {} while (0)
30 #endif
31 #include "x86_emulate.h"
32 #include <linux/module.h>
33
34 /*
35 * Opcode effective-address decode tables.
36 * Note that we only emulate instructions that have at least one memory
37 * operand (excluding implicit stack references). We assume that stack
38 * references and instruction fetches will never occur in special memory
39 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
40 * not be handled.
41 */
42
43 /* Operand sizes: 8-bit operands or specified/overridden size. */
44 #define ByteOp (1<<0) /* 8-bit operands. */
45 /* Destination operand type. */
46 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
47 #define DstReg (2<<1) /* Register operand. */
48 #define DstMem (3<<1) /* Memory operand. */
49 #define DstMask (3<<1)
50 /* Source operand type. */
51 #define SrcNone (0<<3) /* No source operand. */
52 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
53 #define SrcReg (1<<3) /* Register operand. */
54 #define SrcMem (2<<3) /* Memory operand. */
55 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
56 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
57 #define SrcImm (5<<3) /* Immediate operand. */
58 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
59 #define SrcMask (7<<3)
60 /* Generic ModRM decode. */
61 #define ModRM (1<<6)
62 /* Destination is only written; never read. */
63 #define Mov (1<<7)
64 #define BitOp (1<<8)
65
/*
 * Per-opcode decode flags for the one-byte opcode map.  Each entry ORs the
 * ByteOp/Dst*/Src*/ModRM/Mov flags defined above; a zero entry means the
 * opcode is not handled by the generic decoder (x86_decode_insn() rejects it
 * unless it escapes to the two-byte map via 0x0f).
 */
66 static u8 opcode_table[256] = {
67 /* 0x00 - 0x07 */
68 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
69 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
70 0, 0, 0, 0,
71 /* 0x08 - 0x0F */
72 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
73 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
74 0, 0, 0, 0,
75 /* 0x10 - 0x17 */
76 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
77 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
78 0, 0, 0, 0,
79 /* 0x18 - 0x1F */
80 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
81 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
82 0, 0, 0, 0,
83 /* 0x20 - 0x27 */
84 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
85 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
86 SrcImmByte, SrcImm, 0, 0,
87 /* 0x28 - 0x2F */
88 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
89 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
90 0, 0, 0, 0,
91 /* 0x30 - 0x37 */
92 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
93 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
94 0, 0, 0, 0,
95 /* 0x38 - 0x3F */
96 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
97 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
98 0, 0, 0, 0,
99 /* 0x40 - 0x4F */
100 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
101 /* 0x50 - 0x57 */
102 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
103 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
104 /* 0x58 - 0x5F */
105 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
106 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
107 /* 0x60 - 0x67 */
108 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
109 0, 0, 0, 0,
110 /* 0x68 - 0x6F */
111 0, 0, ImplicitOps|Mov, 0,
112 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
113 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
114 /* 0x70 - 0x77 */
115 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
116 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
117 /* 0x78 - 0x7F */
118 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
119 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
120 /* 0x80 - 0x87 */
121 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
122 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
123 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
124 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
125 /* 0x88 - 0x8F */
126 ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
127 ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
128 0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov,
129 /* 0x90 - 0x9F */
130 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps, ImplicitOps, 0, 0,
131 /* 0xA0 - 0xA7 */
132 ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
133 ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
134 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
135 ByteOp | ImplicitOps, ImplicitOps,
136 /* 0xA8 - 0xAF */
137 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
138 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
139 ByteOp | ImplicitOps, ImplicitOps,
140 /* 0xB0 - 0xBF */
141 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
142 /* 0xC0 - 0xC7 */
143 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
144 0, ImplicitOps, 0, 0,
145 ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
146 /* 0xC8 - 0xCF */
147 0, 0, 0, 0, 0, 0, 0, 0,
148 /* 0xD0 - 0xD7 */
149 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
150 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
151 0, 0, 0, 0,
152 /* 0xD8 - 0xDF */
153 0, 0, 0, 0, 0, 0, 0, 0,
154 /* 0xE0 - 0xE7 */
155 0, 0, 0, 0, 0, 0, 0, 0,
156 /* 0xE8 - 0xEF */
157 ImplicitOps, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps, 0, 0, 0, 0,
158 /* 0xF0 - 0xF7 */
159 0, 0, 0, 0,
160 ImplicitOps, 0,
161 ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
162 /* 0xF8 - 0xFF */
163 0, 0, 0, 0,
164 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
165 };
166
/*
 * Decode flags for the two-byte (0x0f-escaped) opcode map.  u16 rather than
 * u8 because these entries may carry BitOp (1<<8); zero entries are opcodes
 * the decoder rejects.
 */
167 static u16 twobyte_table[256] = {
168 /* 0x00 - 0x0F */
169 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
170 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
171 /* 0x10 - 0x1F */
172 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
173 /* 0x20 - 0x2F */
174 ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
175 0, 0, 0, 0, 0, 0, 0, 0,
176 /* 0x30 - 0x3F */
177 ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
178 /* 0x40 - 0x47 */
179 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
180 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
181 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
182 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
183 /* 0x48 - 0x4F */
184 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
185 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
186 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
187 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
188 /* 0x50 - 0x5F */
189 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
190 /* 0x60 - 0x6F */
191 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
192 /* 0x70 - 0x7F */
193 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
194 /* 0x80 - 0x8F */
195 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
196 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
197 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
198 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
199 /* 0x90 - 0x9F */
200 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
201 /* 0xA0 - 0xA7 */
202 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
203 /* 0xA8 - 0xAF */
204 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
205 /* 0xB0 - 0xB7 */
206 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
207 DstMem | SrcReg | ModRM | BitOp,
208 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
209 DstReg | SrcMem16 | ModRM | Mov,
210 /* 0xB8 - 0xBF */
211 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
212 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
213 DstReg | SrcMem16 | ModRM | Mov,
214 /* 0xC0 - 0xCF */
215 0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
216 0, 0, 0, 0, 0, 0, 0, 0,
217 /* 0xD0 - 0xDF */
218 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
219 /* 0xE0 - 0xEF */
220 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
221 /* 0xF0 - 0xFF */
222 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
223 };
224
225 /* EFLAGS bit definitions. */
226 #define EFLG_OF (1<<11)
227 #define EFLG_DF (1<<10)
228 #define EFLG_SF (1<<7)
229 #define EFLG_ZF (1<<6)
230 #define EFLG_AF (1<<4)
231 #define EFLG_PF (1<<2)
232 #define EFLG_CF (1<<0)
233
234 /*
235 * Instruction emulation:
236 * Most instructions are emulated directly via a fragment of inline assembly
237 * code. This allows us to save/restore EFLAGS and thus very easily pick up
238 * any modified flags.
239 */
240
241 #if defined(CONFIG_X86_64)
242 #define _LO32 "k" /* force 32-bit operand */
243 #define _STK "%%rsp" /* stack pointer */
244 #elif defined(__i386__)
245 #define _LO32 "" /* force 32-bit operand */
246 #define _STK "%%esp" /* stack pointer */
247 #endif
248
249 /*
250 * These EFLAGS bits are restored from saved value during emulation, and
251 * any changes are written back to the saved value after emulation.
252 */
253 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
254
255 /* Before executing instruction: restore necessary bits in EFLAGS. */
256 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
257 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
258 "push %"_sav"; " \
259 "movl %"_msk",%"_LO32 _tmp"; " \
260 "andl %"_LO32 _tmp",("_STK"); " \
261 "pushf; " \
262 "notl %"_LO32 _tmp"; " \
263 "andl %"_LO32 _tmp",("_STK"); " \
264 "pop %"_tmp"; " \
265 "orl %"_LO32 _tmp",("_STK"); " \
266 "popf; " \
267 /* _sav &= ~msk; */ \
268 "movl %"_msk",%"_LO32 _tmp"; " \
269 "notl %"_LO32 _tmp"; " \
270 "andl %"_LO32 _tmp",%"_sav"; "
271
272 /* After executing instruction: write-back necessary bits in EFLAGS. */
273 #define _POST_EFLAGS(_sav, _msk, _tmp) \
274 /* _sav |= EFLAGS & _msk; */ \
275 "pushf; " \
276 "pop %"_tmp"; " \
277 "andl %"_msk",%"_LO32 _tmp"; " \
278 "orl %"_LO32 _tmp",%"_sav"; "
279
280 /* Raw emulation: instruction has two explicit operands. */
281 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
282 do { \
283 unsigned long _tmp; \
284 \
285 switch ((_dst).bytes) { \
286 case 2: \
287 __asm__ __volatile__ ( \
288 _PRE_EFLAGS("0","4","2") \
289 _op"w %"_wx"3,%1; " \
290 _POST_EFLAGS("0","4","2") \
291 : "=m" (_eflags), "=m" ((_dst).val), \
292 "=&r" (_tmp) \
293 : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
294 break; \
295 case 4: \
296 __asm__ __volatile__ ( \
297 _PRE_EFLAGS("0","4","2") \
298 _op"l %"_lx"3,%1; " \
299 _POST_EFLAGS("0","4","2") \
300 : "=m" (_eflags), "=m" ((_dst).val), \
301 "=&r" (_tmp) \
302 : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
303 break; \
304 case 8: \
305 __emulate_2op_8byte(_op, _src, _dst, \
306 _eflags, _qx, _qy); \
307 break; \
308 } \
309 } while (0)
310
311 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
312 do { \
313 unsigned long _tmp; \
314 switch ( (_dst).bytes ) \
315 { \
316 case 1: \
317 __asm__ __volatile__ ( \
318 _PRE_EFLAGS("0","4","2") \
319 _op"b %"_bx"3,%1; " \
320 _POST_EFLAGS("0","4","2") \
321 : "=m" (_eflags), "=m" ((_dst).val), \
322 "=&r" (_tmp) \
323 : _by ((_src).val), "i" (EFLAGS_MASK) ); \
324 break; \
325 default: \
326 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
327 _wx, _wy, _lx, _ly, _qx, _qy); \
328 break; \
329 } \
330 } while (0)
331
332 /* Source operand is byte-sized and may be restricted to just %cl. */
333 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
334 __emulate_2op(_op, _src, _dst, _eflags, \
335 "b", "c", "b", "c", "b", "c", "b", "c")
336
337 /* Source operand is byte, word, long or quad sized. */
338 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
339 __emulate_2op(_op, _src, _dst, _eflags, \
340 "b", "q", "w", "r", _LO32, "r", "", "r")
341
342 /* Source operand is word, long or quad sized. */
343 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
344 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
345 "w", "r", _LO32, "r", "", "r")
346
347 /* Instruction has only one explicit operand (no source operand). */
348 #define emulate_1op(_op, _dst, _eflags) \
349 do { \
350 unsigned long _tmp; \
351 \
352 switch ( (_dst).bytes ) \
353 { \
354 case 1: \
355 __asm__ __volatile__ ( \
356 _PRE_EFLAGS("0","3","2") \
357 _op"b %1; " \
358 _POST_EFLAGS("0","3","2") \
359 : "=m" (_eflags), "=m" ((_dst).val), \
360 "=&r" (_tmp) \
361 : "i" (EFLAGS_MASK) ); \
362 break; \
363 case 2: \
364 __asm__ __volatile__ ( \
365 _PRE_EFLAGS("0","3","2") \
366 _op"w %1; " \
367 _POST_EFLAGS("0","3","2") \
368 : "=m" (_eflags), "=m" ((_dst).val), \
369 "=&r" (_tmp) \
370 : "i" (EFLAGS_MASK) ); \
371 break; \
372 case 4: \
373 __asm__ __volatile__ ( \
374 _PRE_EFLAGS("0","3","2") \
375 _op"l %1; " \
376 _POST_EFLAGS("0","3","2") \
377 : "=m" (_eflags), "=m" ((_dst).val), \
378 "=&r" (_tmp) \
379 : "i" (EFLAGS_MASK) ); \
380 break; \
381 case 8: \
382 __emulate_1op_8byte(_op, _dst, _eflags); \
383 break; \
384 } \
385 } while (0)
386
387 /* Emulate an instruction with quadword operands (x86/64 only). */
388 #if defined(CONFIG_X86_64)
389 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
390 do { \
391 __asm__ __volatile__ ( \
392 _PRE_EFLAGS("0","4","2") \
393 _op"q %"_qx"3,%1; " \
394 _POST_EFLAGS("0","4","2") \
395 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
396 : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
397 } while (0)
398
399 #define __emulate_1op_8byte(_op, _dst, _eflags) \
400 do { \
401 __asm__ __volatile__ ( \
402 _PRE_EFLAGS("0","3","2") \
403 _op"q %1; " \
404 _POST_EFLAGS("0","3","2") \
405 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
406 : "i" (EFLAGS_MASK) ); \
407 } while (0)
408
409 #elif defined(__i386__)
410 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
411 #define __emulate_1op_8byte(_op, _dst, _eflags)
412 #endif /* __i386__ */
413
/*
 * Fetch the next part of the instruction being emulated.  Reads _size bytes
 * at (_eip) + ctxt->cs_base via ops->read_std; on failure it assigns the
 * caller's local 'rc' and jumps to the caller's 'done' label (both MUST
 * exist at the use site), otherwise advances _eip and yields the fetched
 * value cast to _type.
 */
415 #define insn_fetch(_type, _size, _eip) \
416 ({ unsigned long _x; \
417 rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
418 (_size), ctxt->vcpu); \
419 if ( rc != 0 ) \
420 goto done; \
421 (_eip) += (_size); \
422 (_type)_x; \
423 })
424
/*
 * Access/update an address held in a register, honouring the current
 * address size: with sub-long ad_bytes only the low (ad_bytes*8) bits of
 * the register participate.  These macros rely on a local 'c' pointing at
 * the decode cache.
 */
426 #define address_mask(reg) \
427 ((c->ad_bytes == sizeof(unsigned long)) ? \
428 (reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
429 #define register_address(base, reg) \
430 ((base) + address_mask(reg))
/* Increment a register in place, wrapping within the address-size mask. */
431 #define register_address_increment(reg, inc) \
432 do { \
433 /* signed type ensures sign extension to long */ \
434 int _inc = (inc); \
435 if (c->ad_bytes == sizeof(unsigned long)) \
436 (reg) += _inc; \
437 else \
438 (reg) = ((reg) & \
439 ~((1UL << (c->ad_bytes << 3)) - 1)) | \
440 (((reg) + _inc) & \
441 ((1UL << (c->ad_bytes << 3)) - 1)); \
442 } while (0)
443
/* Relative jump: displace c->eip by 'rel', address-size wrapped. */
444 #define JMP_REL(rel) \
445 do { \
446 register_address_increment(c->eip, rel); \
447 } while (0)
448
449 /*
450 * Given the 'reg' portion of a ModRM byte, and a register block, return a
451 * pointer into the block that addresses the relevant register.
452 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
453 */
454 static void *decode_register(u8 modrm_reg, unsigned long *regs,
455 int highbyte_regs)
456 {
457 void *p;
458
459 p = &regs[modrm_reg];
460 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
461 p = (unsigned char *)&regs[modrm_reg & 3] + 1;
462 return p;
463 }
464
465 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
466 struct x86_emulate_ops *ops,
467 void *ptr,
468 u16 *size, unsigned long *address, int op_bytes)
469 {
470 int rc;
471
472 if (op_bytes == 2)
473 op_bytes = 3;
474 *address = 0;
475 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
476 ctxt->vcpu);
477 if (rc)
478 return rc;
479 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
480 ctxt->vcpu);
481 return rc;
482 }
483
484 static int test_cc(unsigned int condition, unsigned int flags)
485 {
486 int rc = 0;
487
488 switch ((condition & 15) >> 1) {
489 case 0: /* o */
490 rc |= (flags & EFLG_OF);
491 break;
492 case 1: /* b/c/nae */
493 rc |= (flags & EFLG_CF);
494 break;
495 case 2: /* z/e */
496 rc |= (flags & EFLG_ZF);
497 break;
498 case 3: /* be/na */
499 rc |= (flags & (EFLG_CF|EFLG_ZF));
500 break;
501 case 4: /* s */
502 rc |= (flags & EFLG_SF);
503 break;
504 case 5: /* p/pe */
505 rc |= (flags & EFLG_PF);
506 break;
507 case 7: /* le/ng */
508 rc |= (flags & EFLG_ZF);
509 /* fall through */
510 case 6: /* l/nge */
511 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
512 break;
513 }
514
515 /* Odd condition identifiers (lsb == 1) have inverted sense. */
516 return (!!rc ^ (condition & 1));
517 }
518
/*
 * Decode the instruction at vcpu->rip into ctxt->decode without executing
 * it: consumes legacy prefixes, an optional REX prefix, the opcode byte(s),
 * ModRM/SIB/displacement, and pre-fetches the source operand and destination
 * register value.  Returns 0 on success, -1 on an unrecognised opcode,
 * unsupported mode, or an unhandleable instruction fetch.  Uses insn_fetch(),
 * which on fetch failure sets the local 'rc' and jumps to 'done' below.
 */
519 int
520 x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
521 {
522 struct decode_cache *c = &ctxt->decode;
523 u8 sib, rex_prefix = 0;
524 unsigned int i;
525 int rc = 0;
526 int mode = ctxt->mode;
527 int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
528
529 /* Shadow copy of register state. Committed on successful emulation. */
530
531 memset(c, 0, sizeof(struct decode_cache));
532 c->eip = ctxt->vcpu->rip;
533 memcpy(c->regs, ctxt->vcpu->regs, sizeof c->regs);
534
/* Default operand/address sizes from the CPU mode; long mode defaults to
   32-bit operands (widened by REX.W) with 64-bit addressing. */
535 switch (mode) {
536 case X86EMUL_MODE_REAL:
537 case X86EMUL_MODE_PROT16:
538 c->op_bytes = c->ad_bytes = 2;
539 break;
540 case X86EMUL_MODE_PROT32:
541 c->op_bytes = c->ad_bytes = 4;
542 break;
543 #ifdef CONFIG_X86_64
544 case X86EMUL_MODE_PROT64:
545 c->op_bytes = 4;
546 c->ad_bytes = 8;
547 break;
548 #endif
549 default:
550 return -1;
551 }
552
/* Legacy prefixes: at most 8 are consumed; the first non-prefix byte is
   left in c->b and decoding continues at done_prefixes. */
553 /* Legacy prefixes. */
554 for (i = 0; i < 8; i++) {
555 switch (c->b = insn_fetch(u8, 1, c->eip)) {
556 case 0x66: /* operand-size override */
557 c->op_bytes ^= 6; /* switch between 2/4 bytes */
558 break;
559 case 0x67: /* address-size override */
560 if (mode == X86EMUL_MODE_PROT64)
561 /* switch between 4/8 bytes */
562 c->ad_bytes ^= 12;
563 else
564 /* switch between 2/4 bytes */
565 c->ad_bytes ^= 6;
566 break;
567 case 0x2e: /* CS override */
568 c->override_base = &ctxt->cs_base;
569 break;
570 case 0x3e: /* DS override */
571 c->override_base = &ctxt->ds_base;
572 break;
573 case 0x26: /* ES override */
574 c->override_base = &ctxt->es_base;
575 break;
576 case 0x64: /* FS override */
577 c->override_base = &ctxt->fs_base;
578 break;
579 case 0x65: /* GS override */
580 c->override_base = &ctxt->gs_base;
581 break;
582 case 0x36: /* SS override */
583 c->override_base = &ctxt->ss_base;
584 break;
585 case 0xf0: /* LOCK */
586 c->lock_prefix = 1;
587 break;
588 case 0xf2: /* REPNE/REPNZ */
589 case 0xf3: /* REP/REPE/REPZ */
590 c->rep_prefix = 1;
591 break;
592 default:
593 goto done_prefixes;
594 }
595 }
596
597 done_prefixes:
598
/* REX extends reg/index/base encodings by one high bit each; it must
   immediately precede the opcode. */
599 /* REX prefix. */
600 if ((mode == X86EMUL_MODE_PROT64) && ((c->b & 0xf0) == 0x40)) {
601 rex_prefix = c->b;
602 if (c->b & 8)
603 c->op_bytes = 8; /* REX.W */
604 c->modrm_reg = (c->b & 4) << 1; /* REX.R */
605 index_reg = (c->b & 2) << 2; /* REX.X */
606 c->modrm_rm = base_reg = (c->b & 1) << 3; /* REX.B */
607 c->b = insn_fetch(u8, 1, c->eip);
608 }
609
610 /* Opcode byte(s). */
611 c->d = opcode_table[c->b];
612 if (c->d == 0) {
613 /* Two-byte opcode? */
614 if (c->b == 0x0f) {
615 c->twobyte = 1;
616 c->b = insn_fetch(u8, 1, c->eip);
617 c->d = twobyte_table[c->b];
618 }
619
620 /* Unrecognised? */
621 if (c->d == 0) {
622 DPRINTF("Cannot emulate %02x\n", c->b);
623 return -1;
624 }
625 }
626
/* ModRM/SIB decode accumulates the effective address in c->modrm_ea;
   the |= on mod/reg/rm merges with the REX bits seeded above. */
627 /* ModRM and SIB bytes. */
628 if (c->d & ModRM) {
629 c->modrm = insn_fetch(u8, 1, c->eip);
630 c->modrm_mod |= (c->modrm & 0xc0) >> 6;
631 c->modrm_reg |= (c->modrm & 0x38) >> 3;
632 c->modrm_rm |= (c->modrm & 0x07);
633 c->modrm_ea = 0;
634 c->use_modrm_ea = 1;
635
636 if (c->modrm_mod == 3) {
637 c->modrm_val = *(unsigned long *)
638 decode_register(c->modrm_rm, c->regs, c->d & ByteOp);
639 goto modrm_done;
640 }
641
642 if (c->ad_bytes == 2) {
643 unsigned bx = c->regs[VCPU_REGS_RBX];
644 unsigned bp = c->regs[VCPU_REGS_RBP];
645 unsigned si = c->regs[VCPU_REGS_RSI];
646 unsigned di = c->regs[VCPU_REGS_RDI];
647
648 /* 16-bit ModR/M decode. */
649 switch (c->modrm_mod) {
650 case 0:
651 if (c->modrm_rm == 6)
652 c->modrm_ea +=
653 insn_fetch(u16, 2, c->eip);
654 break;
655 case 1:
656 c->modrm_ea += insn_fetch(s8, 1, c->eip);
657 break;
658 case 2:
659 c->modrm_ea += insn_fetch(u16, 2, c->eip);
660 break;
661 }
662 switch (c->modrm_rm) {
663 case 0:
664 c->modrm_ea += bx + si;
665 break;
666 case 1:
667 c->modrm_ea += bx + di;
668 break;
669 case 2:
670 c->modrm_ea += bp + si;
671 break;
672 case 3:
673 c->modrm_ea += bp + di;
674 break;
675 case 4:
676 c->modrm_ea += si;
677 break;
678 case 5:
679 c->modrm_ea += di;
680 break;
681 case 6:
682 if (c->modrm_mod != 0)
683 c->modrm_ea += bp;
684 break;
685 case 7:
686 c->modrm_ea += bx;
687 break;
688 }
/* BP-based forms default to the stack segment. */
689 if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
690 (c->modrm_rm == 6 && c->modrm_mod != 0))
691 if (!c->override_base)
692 c->override_base = &ctxt->ss_base;
693 c->modrm_ea = (u16)c->modrm_ea;
694 } else {
695 /* 32/64-bit ModR/M decode. */
696 switch (c->modrm_rm) {
697 case 4:
698 case 12:
/* rm=4 (and REX.B'd 12) selects a SIB byte: scale*index + base. */
699 sib = insn_fetch(u8, 1, c->eip);
700 index_reg |= (sib >> 3) & 7;
701 base_reg |= sib & 7;
702 scale = sib >> 6;
703
704 switch (base_reg) {
705 case 5:
706 if (c->modrm_mod != 0)
707 c->modrm_ea +=
708 c->regs[base_reg];
709 else
710 c->modrm_ea +=
711 insn_fetch(s32, 4, c->eip);
712 break;
713 default:
714 c->modrm_ea += c->regs[base_reg];
715 }
716 switch (index_reg) {
717 case 4:
718 break;
719 default:
720 c->modrm_ea +=
721 c->regs[index_reg] << scale;
722
723 }
724 break;
725 case 5:
/* mod=0, rm=5: disp32 (added below); RIP-relative in long mode. */
726 if (c->modrm_mod != 0)
727 c->modrm_ea += c->regs[c->modrm_rm];
728 else if (mode == X86EMUL_MODE_PROT64)
729 rip_relative = 1;
730 break;
731 default:
732 c->modrm_ea += c->regs[c->modrm_rm];
733 break;
734 }
735 switch (c->modrm_mod) {
736 case 0:
737 if (c->modrm_rm == 5)
738 c->modrm_ea +=
739 insn_fetch(s32, 4, c->eip);
740 break;
741 case 1:
742 c->modrm_ea += insn_fetch(s8, 1, c->eip);
743 break;
744 case 2:
745 c->modrm_ea += insn_fetch(s32, 4, c->eip);
746 break;
747 }
748 }
749 if (!c->override_base)
750 c->override_base = &ctxt->ds_base;
/* In long mode only FS/GS segment overrides have effect. */
751 if (mode == X86EMUL_MODE_PROT64 &&
752 c->override_base != &ctxt->fs_base &&
753 c->override_base != &ctxt->gs_base)
754 c->override_base = NULL;
755
756 if (c->override_base)
757 c->modrm_ea += *c->override_base;
758
/* RIP-relative addressing is relative to the END of the instruction, so
   account for any immediate bytes still to be fetched. */
759 if (rip_relative) {
760 c->modrm_ea += c->eip;
761 switch (c->d & SrcMask) {
762 case SrcImmByte:
763 c->modrm_ea += 1;
764 break;
765 case SrcImm:
766 if (c->d & ByteOp)
767 c->modrm_ea += 1;
768 else
769 if (c->op_bytes == 8)
770 c->modrm_ea += 4;
771 else
772 c->modrm_ea += c->op_bytes;
773 }
774 }
775 if (c->ad_bytes != 8)
776 c->modrm_ea = (u32)c->modrm_ea;
777 modrm_done:
778 ;
779 }
780
781 /*
782 * Decode and fetch the source operand: register, memory
783 * or immediate.
784 */
785 switch (c->d & SrcMask) {
786 case SrcNone:
787 break;
788 case SrcReg:
789 c->src.type = OP_REG;
790 if (c->d & ByteOp) {
791 c->src.ptr =
792 decode_register(c->modrm_reg, c->regs,
793 (rex_prefix == 0));
794 c->src.val = c->src.orig_val = *(u8 *)c->src.ptr;
795 c->src.bytes = 1;
796 } else {
797 c->src.ptr =
798 decode_register(c->modrm_reg, c->regs, 0);
799 switch ((c->src.bytes = c->op_bytes)) {
800 case 2:
801 c->src.val = c->src.orig_val =
802 *(u16 *) c->src.ptr;
803 break;
804 case 4:
805 c->src.val = c->src.orig_val =
806 *(u32 *) c->src.ptr;
807 break;
808 case 8:
809 c->src.val = c->src.orig_val =
810 *(u64 *) c->src.ptr;
811 break;
812 }
813 }
814 break;
815 case SrcMem16:
816 c->src.bytes = 2;
817 goto srcmem_common;
818 case SrcMem32:
819 c->src.bytes = 4;
820 goto srcmem_common;
821 case SrcMem:
822 c->src.bytes = (c->d & ByteOp) ? 1 :
823 c->op_bytes;
824 /* Don't fetch the address for invlpg: it could be unmapped. */
825 if (c->twobyte && c->b == 0x01
826 && c->modrm_reg == 7)
827 break;
828 srcmem_common:
829 /*
830 * For instructions with a ModR/M byte, switch to register
831 * access if Mod = 3.
832 */
833 if ((c->d & ModRM) && c->modrm_mod == 3) {
834 c->src.type = OP_REG;
835 break;
836 }
837 c->src.type = OP_MEM;
838 break;
839 case SrcImm:
840 c->src.type = OP_IMM;
841 c->src.ptr = (unsigned long *)c->eip;
842 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
843 if (c->src.bytes == 8)
844 c->src.bytes = 4;
845 /* NB. Immediates are sign-extended as necessary. */
846 switch (c->src.bytes) {
847 case 1:
848 c->src.val = insn_fetch(s8, 1, c->eip);
849 break;
850 case 2:
851 c->src.val = insn_fetch(s16, 2, c->eip);
852 break;
853 case 4:
854 c->src.val = insn_fetch(s32, 4, c->eip);
855 break;
856 }
857 break;
858 case SrcImmByte:
859 c->src.type = OP_IMM;
860 c->src.ptr = (unsigned long *)c->eip;
861 c->src.bytes = 1;
862 c->src.val = insn_fetch(s8, 1, c->eip);
863 break;
864 }
865
866 /* Decode and fetch the destination operand: register or memory. */
867 switch (c->d & DstMask) {
868 case ImplicitOps:
869 /* Special instructions do their own operand decoding. */
870 return 0;
871 case DstReg:
872 c->dst.type = OP_REG;
/* movzx byte forms (0f b6/b7) write a full-size destination, so do not
   take the byte path even though ByteOp is set. */
873 if ((c->d & ByteOp)
874 && !(c->twobyte &&
875 (c->b == 0xb6 || c->b == 0xb7))) {
876 c->dst.ptr =
877 decode_register(c->modrm_reg, c->regs,
878 (rex_prefix == 0));
879 c->dst.val = *(u8 *) c->dst.ptr;
880 c->dst.bytes = 1;
881 } else {
882 c->dst.ptr =
883 decode_register(c->modrm_reg, c->regs, 0);
884 switch ((c->dst.bytes = c->op_bytes)) {
885 case 2:
886 c->dst.val = *(u16 *)c->dst.ptr;
887 break;
888 case 4:
889 c->dst.val = *(u32 *)c->dst.ptr;
890 break;
891 case 8:
892 c->dst.val = *(u64 *)c->dst.ptr;
893 break;
894 }
895 }
896 break;
897 case DstMem:
898 if ((c->d & ModRM) && c->modrm_mod == 3) {
899 c->dst.type = OP_REG;
900 break;
901 }
902 c->dst.type = OP_MEM;
903 break;
904 }
905
/* Jump target for insn_fetch() failures. */
906 done:
907 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
908 }
909
910 int
911 x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
912 {
913 unsigned long cr2 = ctxt->cr2;
914 int no_wb = 0;
915 u64 msr_data;
916 unsigned long saved_eip = 0;
917 unsigned long _eflags = ctxt->eflags;
918 struct decode_cache *c = &ctxt->decode;
919 int rc = 0;
920
921 /* Shadow copy of register state. Committed on successful emulation.
922 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
923 * modify them.
924 */
925
926 memcpy(c->regs, ctxt->vcpu->regs, sizeof c->regs);
927 saved_eip = c->eip;
928
929 if ((c->d & ModRM) && (c->modrm_mod != 3))
930 cr2 = c->modrm_ea;
931
932 if (c->src.type == OP_MEM) {
933 c->src.ptr = (unsigned long *)cr2;
934 c->src.val = 0;
935 if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
936 &c->src.val,
937 c->src.bytes,
938 ctxt->vcpu)) != 0)
939 goto done;
940 c->src.orig_val = c->src.val;
941 }
942
943 if ((c->d & DstMask) == ImplicitOps)
944 goto special_insn;
945
946
947 if (c->dst.type == OP_MEM) {
948 c->dst.ptr = (unsigned long *)cr2;
949 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
950 c->dst.val = 0;
951 if (c->d & BitOp) {
952 unsigned long mask = ~(c->dst.bytes * 8 - 1);
953
954 c->dst.ptr = (void *)c->dst.ptr +
955 (c->src.val & mask) / 8;
956 }
957 if (!(c->d & Mov) &&
958 /* optimisation - avoid slow emulated read */
959 ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
960 &c->dst.val,
961 c->dst.bytes, ctxt->vcpu)) != 0))
962 goto done;
963 }
964 c->dst.orig_val = c->dst.val;
965
966 if (c->twobyte)
967 goto twobyte_insn;
968
969 switch (c->b) {
970 case 0x00 ... 0x05:
971 add: /* add */
972 emulate_2op_SrcV("add", c->src, c->dst, _eflags);
973 break;
974 case 0x08 ... 0x0d:
975 or: /* or */
976 emulate_2op_SrcV("or", c->src, c->dst, _eflags);
977 break;
978 case 0x10 ... 0x15:
979 adc: /* adc */
980 emulate_2op_SrcV("adc", c->src, c->dst, _eflags);
981 break;
982 case 0x18 ... 0x1d:
983 sbb: /* sbb */
984 emulate_2op_SrcV("sbb", c->src, c->dst, _eflags);
985 break;
986 case 0x20 ... 0x23:
987 and: /* and */
988 emulate_2op_SrcV("and", c->src, c->dst, _eflags);
989 break;
990 case 0x24: /* and al imm8 */
991 c->dst.type = OP_REG;
992 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
993 c->dst.val = *(u8 *)c->dst.ptr;
994 c->dst.bytes = 1;
995 c->dst.orig_val = c->dst.val;
996 goto and;
997 case 0x25: /* and ax imm16, or eax imm32 */
998 c->dst.type = OP_REG;
999 c->dst.bytes = c->op_bytes;
1000 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1001 if (c->op_bytes == 2)
1002 c->dst.val = *(u16 *)c->dst.ptr;
1003 else
1004 c->dst.val = *(u32 *)c->dst.ptr;
1005 c->dst.orig_val = c->dst.val;
1006 goto and;
1007 case 0x28 ... 0x2d:
1008 sub: /* sub */
1009 emulate_2op_SrcV("sub", c->src, c->dst, _eflags);
1010 break;
1011 case 0x30 ... 0x35:
1012 xor: /* xor */
1013 emulate_2op_SrcV("xor", c->src, c->dst, _eflags);
1014 break;
1015 case 0x38 ... 0x3d:
1016 cmp: /* cmp */
1017 emulate_2op_SrcV("cmp", c->src, c->dst, _eflags);
1018 break;
1019 case 0x63: /* movsxd */
1020 if (ctxt->mode != X86EMUL_MODE_PROT64)
1021 goto cannot_emulate;
1022 c->dst.val = (s32) c->src.val;
1023 break;
1024 case 0x80 ... 0x83: /* Grp1 */
1025 switch (c->modrm_reg) {
1026 case 0:
1027 goto add;
1028 case 1:
1029 goto or;
1030 case 2:
1031 goto adc;
1032 case 3:
1033 goto sbb;
1034 case 4:
1035 goto and;
1036 case 5:
1037 goto sub;
1038 case 6:
1039 goto xor;
1040 case 7:
1041 goto cmp;
1042 }
1043 break;
1044 case 0x84 ... 0x85:
1045 test: /* test */
1046 emulate_2op_SrcV("test", c->src, c->dst, _eflags);
1047 break;
1048 case 0x86 ... 0x87: /* xchg */
1049 /* Write back the register source. */
1050 switch (c->dst.bytes) {
1051 case 1:
1052 *(u8 *) c->src.ptr = (u8) c->dst.val;
1053 break;
1054 case 2:
1055 *(u16 *) c->src.ptr = (u16) c->dst.val;
1056 break;
1057 case 4:
1058 *c->src.ptr = (u32) c->dst.val;
1059 break; /* 64b reg: zero-extend */
1060 case 8:
1061 *c->src.ptr = c->dst.val;
1062 break;
1063 }
1064 /*
1065 * Write back the memory destination with implicit LOCK
1066 * prefix.
1067 */
1068 c->dst.val = c->src.val;
1069 c->lock_prefix = 1;
1070 break;
1071 case 0x88 ... 0x8b: /* mov */
1072 goto mov;
1073 case 0x8d: /* lea r16/r32, m */
1074 c->dst.val = c->modrm_val;
1075 break;
1076 case 0x8f: /* pop (sole member of Grp1a) */
1077 /* 64-bit mode: POP always pops a 64-bit operand. */
1078 if (ctxt->mode == X86EMUL_MODE_PROT64)
1079 c->dst.bytes = 8;
1080 if ((rc = ops->read_std(register_address(
1081 ctxt->ss_base,
1082 c->regs[VCPU_REGS_RSP]),
1083 &c->dst.val,
1084 c->dst.bytes,
1085 ctxt->vcpu)) != 0)
1086 goto done;
1087 register_address_increment(c->regs[VCPU_REGS_RSP],
1088 c->dst.bytes);
1089 break;
1090 case 0xa0 ... 0xa1: /* mov */
1091 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1092 c->dst.val = c->src.val;
1093 /* skip src displacement */
1094 c->eip += c->ad_bytes;
1095 break;
1096 case 0xa2 ... 0xa3: /* mov */
1097 c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
1098 /* skip c->dst displacement */
1099 c->eip += c->ad_bytes;
1100 break;
1101 case 0xc0 ... 0xc1:
1102 grp2: /* Grp2 */
1103 switch (c->modrm_reg) {
1104 case 0: /* rol */
1105 emulate_2op_SrcB("rol", c->src, c->dst, _eflags);
1106 break;
1107 case 1: /* ror */
1108 emulate_2op_SrcB("ror", c->src, c->dst, _eflags);
1109 break;
1110 case 2: /* rcl */
1111 emulate_2op_SrcB("rcl", c->src, c->dst, _eflags);
1112 break;
1113 case 3: /* rcr */
1114 emulate_2op_SrcB("rcr", c->src, c->dst, _eflags);
1115 break;
1116 case 4: /* sal/shl */
1117 case 6: /* sal/shl */
1118 emulate_2op_SrcB("sal", c->src, c->dst, _eflags);
1119 break;
1120 case 5: /* shr */
1121 emulate_2op_SrcB("shr", c->src, c->dst, _eflags);
1122 break;
1123 case 7: /* sar */
1124 emulate_2op_SrcB("sar", c->src, c->dst, _eflags);
1125 break;
1126 }
1127 break;
1128 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1129 mov:
1130 c->dst.val = c->src.val;
1131 break;
1132 case 0xd0 ... 0xd1: /* Grp2 */
1133 c->src.val = 1;
1134 goto grp2;
1135 case 0xd2 ... 0xd3: /* Grp2 */
1136 c->src.val = c->regs[VCPU_REGS_RCX];
1137 goto grp2;
1138 case 0xf6 ... 0xf7: /* Grp3 */
1139 switch (c->modrm_reg) {
1140 case 0 ... 1: /* test */
1141 /*
1142 * Special case in Grp3: test has an immediate
1143 * source operand.
1144 */
1145 c->src.type = OP_IMM;
1146 c->src.ptr = (unsigned long *)c->eip;
1147 c->src.bytes = (c->d & ByteOp) ? 1 :
1148 c->op_bytes;
1149 if (c->src.bytes == 8)
1150 c->src.bytes = 4;
1151 switch (c->src.bytes) {
1152 case 1:
1153 c->src.val = insn_fetch(s8, 1, c->eip);
1154 break;
1155 case 2:
1156 c->src.val = insn_fetch(s16, 2, c->eip);
1157 break;
1158 case 4:
1159 c->src.val = insn_fetch(s32, 4, c->eip);
1160 break;
1161 }
1162 goto test;
1163 case 2: /* not */
1164 c->dst.val = ~c->dst.val;
1165 break;
1166 case 3: /* neg */
1167 emulate_1op("neg", c->dst, _eflags);
1168 break;
1169 default:
1170 goto cannot_emulate;
1171 }
1172 break;
1173 case 0xfe ... 0xff: /* Grp4/Grp5 */
1174 switch (c->modrm_reg) {
1175 case 0: /* inc */
1176 emulate_1op("inc", c->dst, _eflags);
1177 break;
1178 case 1: /* dec */
1179 emulate_1op("dec", c->dst, _eflags);
1180 break;
1181 case 4: /* jmp abs */
1182 if (c->b == 0xff)
1183 c->eip = c->dst.val;
1184 else
1185 goto cannot_emulate;
1186 break;
1187 case 6: /* push */
1188 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1189 if (ctxt->mode == X86EMUL_MODE_PROT64) {
1190 c->dst.bytes = 8;
1191 if ((rc = ops->read_std(
1192 (unsigned long)c->dst.ptr,
1193 &c->dst.val, 8,
1194 ctxt->vcpu)) != 0)
1195 goto done;
1196 }
1197 register_address_increment(c->regs[VCPU_REGS_RSP],
1198 -c->dst.bytes);
1199 if ((rc = ops->write_emulated(
1200 register_address(ctxt->ss_base,
1201 c->regs[VCPU_REGS_RSP]),
1202 &c->dst.val,
1203 c->dst.bytes, ctxt->vcpu)) != 0)
1204 goto done;
1205 no_wb = 1;
1206 break;
1207 default:
1208 goto cannot_emulate;
1209 }
1210 break;
1211 }
1212
1213 writeback:
1214 if (!no_wb) {
1215 switch (c->dst.type) {
1216 case OP_REG:
1217 /* The 4-byte case *is* correct:
1218 * in 64-bit mode we zero-extend.
1219 */
1220 switch (c->dst.bytes) {
1221 case 1:
1222 *(u8 *)c->dst.ptr = (u8)c->dst.val;
1223 break;
1224 case 2:
1225 *(u16 *)c->dst.ptr = (u16)c->dst.val;
1226 break;
1227 case 4:
1228 *c->dst.ptr = (u32)c->dst.val;
1229 break; /* 64b: zero-ext */
1230 case 8:
1231 *c->dst.ptr = c->dst.val;
1232 break;
1233 }
1234 break;
1235 case OP_MEM:
1236 if (c->lock_prefix)
1237 rc = ops->cmpxchg_emulated(
1238 (unsigned long)c->dst.ptr,
1239 &c->dst.orig_val,
1240 &c->dst.val,
1241 c->dst.bytes,
1242 ctxt->vcpu);
1243 else
1244 rc = ops->write_emulated(
1245 (unsigned long)c->dst.ptr,
1246 &c->dst.val,
1247 c->dst.bytes,
1248 ctxt->vcpu);
1249 if (rc != 0)
1250 goto done;
1251 default:
1252 break;
1253 }
1254 }
1255
1256 /* Commit shadow register state. */
1257 memcpy(ctxt->vcpu->regs, c->regs, sizeof c->regs);
1258 ctxt->eflags = _eflags;
1259 ctxt->vcpu->rip = c->eip;
1260
1261 done:
1262 if (rc == X86EMUL_UNHANDLEABLE) {
1263 c->eip = saved_eip;
1264 return -1;
1265 }
1266 return 0;
1267
1268 special_insn:
1269 if (c->twobyte)
1270 goto twobyte_special_insn;
1271 switch (c->b) {
1272 case 0x50 ... 0x57: /* push reg */
1273 if (c->op_bytes == 2)
1274 c->src.val = (u16) c->regs[c->b & 0x7];
1275 else
1276 c->src.val = (u32) c->regs[c->b & 0x7];
1277 c->dst.type = OP_MEM;
1278 c->dst.bytes = c->op_bytes;
1279 c->dst.val = c->src.val;
1280 register_address_increment(c->regs[VCPU_REGS_RSP],
1281 -c->op_bytes);
1282 c->dst.ptr = (void *) register_address(
1283 ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
1284 break;
1285 case 0x58 ... 0x5f: /* pop reg */
1286 c->dst.ptr =
1287 (unsigned long *)&c->regs[c->b & 0x7];
1288 pop_instruction:
1289 if ((rc = ops->read_std(register_address(ctxt->ss_base,
1290 c->regs[VCPU_REGS_RSP]), c->dst.ptr,
1291 c->op_bytes, ctxt->vcpu)) != 0)
1292 goto done;
1293
1294 register_address_increment(c->regs[VCPU_REGS_RSP],
1295 c->op_bytes);
1296 no_wb = 1; /* Disable writeback. */
1297 break;
1298 case 0x6a: /* push imm8 */
1299 c->src.val = 0L;
1300 c->src.val = insn_fetch(s8, 1, c->eip);
1301 push:
1302 c->dst.type = OP_MEM;
1303 c->dst.bytes = c->op_bytes;
1304 c->dst.val = c->src.val;
1305 register_address_increment(c->regs[VCPU_REGS_RSP],
1306 -c->op_bytes);
1307 c->dst.ptr = (void *) register_address(ctxt->ss_base,
1308 c->regs[VCPU_REGS_RSP]);
1309 break;
1310 case 0x6c: /* insb */
1311 case 0x6d: /* insw/insd */
1312 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1313 1,
1314 (c->d & ByteOp) ? 1 : c->op_bytes,
1315 c->rep_prefix ?
1316 address_mask(c->regs[VCPU_REGS_RCX]) : 1,
1317 (_eflags & EFLG_DF),
1318 register_address(ctxt->es_base,
1319 c->regs[VCPU_REGS_RDI]),
1320 c->rep_prefix,
1321 c->regs[VCPU_REGS_RDX]) == 0) {
1322 c->eip = saved_eip;
1323 return -1;
1324 }
1325 return 0;
1326 case 0x6e: /* outsb */
1327 case 0x6f: /* outsw/outsd */
1328 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1329 0,
1330 (c->d & ByteOp) ? 1 : c->op_bytes,
1331 c->rep_prefix ?
1332 address_mask(c->regs[VCPU_REGS_RCX]) : 1,
1333 (_eflags & EFLG_DF),
1334 register_address(c->override_base ?
1335 *c->override_base :
1336 ctxt->ds_base,
1337 c->regs[VCPU_REGS_RSI]),
1338 c->rep_prefix,
1339 c->regs[VCPU_REGS_RDX]) == 0) {
1340 c->eip = saved_eip;
1341 return -1;
1342 }
1343 return 0;
1344 case 0x70 ... 0x7f: /* jcc (short) */ {
1345 int rel = insn_fetch(s8, 1, c->eip);
1346
1347 if (test_cc(c->b, _eflags))
1348 JMP_REL(rel);
1349 break;
1350 }
1351 case 0x9c: /* pushf */
1352 c->src.val = (unsigned long) _eflags;
1353 goto push;
1354 case 0x9d: /* popf */
1355 c->dst.ptr = (unsigned long *) &_eflags;
1356 goto pop_instruction;
1357 case 0xc3: /* ret */
1358 c->dst.ptr = &c->eip;
1359 goto pop_instruction;
1360 case 0xf4: /* hlt */
1361 ctxt->vcpu->halt_request = 1;
1362 goto done;
1363 }
1364 if (c->rep_prefix) {
1365 if (c->regs[VCPU_REGS_RCX] == 0) {
1366 ctxt->vcpu->rip = c->eip;
1367 goto done;
1368 }
1369 c->regs[VCPU_REGS_RCX]--;
1370 c->eip = ctxt->vcpu->rip;
1371 }
1372 switch (c->b) {
1373 case 0xa4 ... 0xa5: /* movs */
1374 c->dst.type = OP_MEM;
1375 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1376 c->dst.ptr = (unsigned long *)register_address(
1377 ctxt->es_base,
1378 c->regs[VCPU_REGS_RDI]);
1379 if ((rc = ops->read_emulated(register_address(
1380 c->override_base ? *c->override_base :
1381 ctxt->ds_base,
1382 c->regs[VCPU_REGS_RSI]),
1383 &c->dst.val,
1384 c->dst.bytes, ctxt->vcpu)) != 0)
1385 goto done;
1386 register_address_increment(c->regs[VCPU_REGS_RSI],
1387 (_eflags & EFLG_DF) ? -c->dst.bytes
1388 : c->dst.bytes);
1389 register_address_increment(c->regs[VCPU_REGS_RDI],
1390 (_eflags & EFLG_DF) ? -c->dst.bytes
1391 : c->dst.bytes);
1392 break;
1393 case 0xa6 ... 0xa7: /* cmps */
1394 DPRINTF("Urk! I don't handle CMPS.\n");
1395 goto cannot_emulate;
1396 case 0xaa ... 0xab: /* stos */
1397 c->dst.type = OP_MEM;
1398 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1399 c->dst.ptr = (unsigned long *)cr2;
1400 c->dst.val = c->regs[VCPU_REGS_RAX];
1401 register_address_increment(c->regs[VCPU_REGS_RDI],
1402 (_eflags & EFLG_DF) ? -c->dst.bytes
1403 : c->dst.bytes);
1404 break;
1405 case 0xac ... 0xad: /* lods */
1406 c->dst.type = OP_REG;
1407 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1408 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1409 if ((rc = ops->read_emulated(cr2, &c->dst.val,
1410 c->dst.bytes,
1411 ctxt->vcpu)) != 0)
1412 goto done;
1413 register_address_increment(c->regs[VCPU_REGS_RSI],
1414 (_eflags & EFLG_DF) ? -c->dst.bytes
1415 : c->dst.bytes);
1416 break;
1417 case 0xae ... 0xaf: /* scas */
1418 DPRINTF("Urk! I don't handle SCAS.\n");
1419 goto cannot_emulate;
1420 case 0xe8: /* call (near) */ {
1421 long int rel;
1422 switch (c->op_bytes) {
1423 case 2:
1424 rel = insn_fetch(s16, 2, c->eip);
1425 break;
1426 case 4:
1427 rel = insn_fetch(s32, 4, c->eip);
1428 break;
1429 case 8:
1430 rel = insn_fetch(s64, 8, c->eip);
1431 break;
1432 default:
1433 DPRINTF("Call: Invalid op_bytes\n");
1434 goto cannot_emulate;
1435 }
1436 c->src.val = (unsigned long) c->eip;
1437 JMP_REL(rel);
1438 c->op_bytes = c->ad_bytes;
1439 goto push;
1440 }
1441 case 0xe9: /* jmp rel */
1442 case 0xeb: /* jmp rel short */
1443 JMP_REL(c->src.val);
1444 no_wb = 1; /* Disable writeback. */
1445 break;
1446
1447
1448 }
1449 goto writeback;
1450
1451 twobyte_insn:
1452 switch (c->b) {
1453 case 0x01: /* lgdt, lidt, lmsw */
1454 /* Disable writeback. */
1455 no_wb = 1;
1456 switch (c->modrm_reg) {
1457 u16 size;
1458 unsigned long address;
1459
1460 case 0: /* vmcall */
1461 if (c->modrm_mod != 3 || c->modrm_rm != 1)
1462 goto cannot_emulate;
1463
1464 rc = kvm_fix_hypercall(ctxt->vcpu);
1465 if (rc)
1466 goto done;
1467
1468 kvm_emulate_hypercall(ctxt->vcpu);
1469 break;
1470 case 2: /* lgdt */
1471 rc = read_descriptor(ctxt, ops, c->src.ptr,
1472 &size, &address, c->op_bytes);
1473 if (rc)
1474 goto done;
1475 realmode_lgdt(ctxt->vcpu, size, address);
1476 break;
1477 case 3: /* lidt/vmmcall */
1478 if (c->modrm_mod == 3 && c->modrm_rm == 1) {
1479 rc = kvm_fix_hypercall(ctxt->vcpu);
1480 if (rc)
1481 goto done;
1482 kvm_emulate_hypercall(ctxt->vcpu);
1483 } else {
1484 rc = read_descriptor(ctxt, ops, c->src.ptr,
1485 &size, &address,
1486 c->op_bytes);
1487 if (rc)
1488 goto done;
1489 realmode_lidt(ctxt->vcpu, size, address);
1490 }
1491 break;
1492 case 4: /* smsw */
1493 if (c->modrm_mod != 3)
1494 goto cannot_emulate;
1495 *(u16 *)&c->regs[c->modrm_rm]
1496 = realmode_get_cr(ctxt->vcpu, 0);
1497 break;
1498 case 6: /* lmsw */
1499 if (c->modrm_mod != 3)
1500 goto cannot_emulate;
1501 realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val, &_eflags);
1502 break;
1503 case 7: /* invlpg*/
1504 emulate_invlpg(ctxt->vcpu, cr2);
1505 break;
1506 default:
1507 goto cannot_emulate;
1508 }
1509 break;
1510 case 0x21: /* mov from dr to reg */
1511 no_wb = 1;
1512 if (c->modrm_mod != 3)
1513 goto cannot_emulate;
1514 rc = emulator_get_dr(ctxt, c->modrm_reg,
1515 &c->regs[c->modrm_rm]);
1516 break;
1517 case 0x23: /* mov from reg to dr */
1518 no_wb = 1;
1519 if (c->modrm_mod != 3)
1520 goto cannot_emulate;
1521 rc = emulator_set_dr(ctxt, c->modrm_reg,
1522 c->regs[c->modrm_rm]);
1523 break;
1524 case 0x40 ... 0x4f: /* cmov */
1525 c->dst.val = c->dst.orig_val = c->src.val;
1526 no_wb = 1;
1527 /*
1528 * First, assume we're decoding an even cmov opcode
1529 * (lsb == 0).
1530 */
1531 switch ((c->b & 15) >> 1) {
1532 case 0: /* cmovo */
1533 no_wb = (_eflags & EFLG_OF) ? 0 : 1;
1534 break;
1535 case 1: /* cmovb/cmovc/cmovnae */
1536 no_wb = (_eflags & EFLG_CF) ? 0 : 1;
1537 break;
1538 case 2: /* cmovz/cmove */
1539 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1540 break;
1541 case 3: /* cmovbe/cmovna */
1542 no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
1543 break;
1544 case 4: /* cmovs */
1545 no_wb = (_eflags & EFLG_SF) ? 0 : 1;
1546 break;
1547 case 5: /* cmovp/cmovpe */
1548 no_wb = (_eflags & EFLG_PF) ? 0 : 1;
1549 break;
1550 case 7: /* cmovle/cmovng */
1551 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1552 /* fall through */
1553 case 6: /* cmovl/cmovnge */
1554 no_wb &= (!(_eflags & EFLG_SF) !=
1555 !(_eflags & EFLG_OF)) ? 0 : 1;
1556 break;
1557 }
1558 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1559 no_wb ^= c->b & 1;
1560 break;
1561 case 0xa3:
1562 bt: /* bt */
1563 /* only subword offset */
1564 c->src.val &= (c->dst.bytes << 3) - 1;
1565 emulate_2op_SrcV_nobyte("bt", c->src, c->dst, _eflags);
1566 break;
1567 case 0xab:
1568 bts: /* bts */
1569 /* only subword offset */
1570 c->src.val &= (c->dst.bytes << 3) - 1;
1571 emulate_2op_SrcV_nobyte("bts", c->src, c->dst, _eflags);
1572 break;
1573 case 0xb0 ... 0xb1: /* cmpxchg */
1574 /*
1575 * Save real source value, then compare EAX against
1576 * destination.
1577 */
1578 c->src.orig_val = c->src.val;
1579 c->src.val = c->regs[VCPU_REGS_RAX];
1580 emulate_2op_SrcV("cmp", c->src, c->dst, _eflags);
1581 if (_eflags & EFLG_ZF) {
1582 /* Success: write back to memory. */
1583 c->dst.val = c->src.orig_val;
1584 } else {
1585 /* Failure: write the value we saw to EAX. */
1586 c->dst.type = OP_REG;
1587 c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1588 }
1589 break;
1590 case 0xb3:
1591 btr: /* btr */
1592 /* only subword offset */
1593 c->src.val &= (c->dst.bytes << 3) - 1;
1594 emulate_2op_SrcV_nobyte("btr", c->src, c->dst, _eflags);
1595 break;
1596 case 0xb6 ... 0xb7: /* movzx */
1597 c->dst.bytes = c->op_bytes;
1598 c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
1599 : (u16) c->src.val;
1600 break;
1601 case 0xba: /* Grp8 */
1602 switch (c->modrm_reg & 3) {
1603 case 0:
1604 goto bt;
1605 case 1:
1606 goto bts;
1607 case 2:
1608 goto btr;
1609 case 3:
1610 goto btc;
1611 }
1612 break;
1613 case 0xbb:
1614 btc: /* btc */
1615 /* only subword offset */
1616 c->src.val &= (c->dst.bytes << 3) - 1;
1617 emulate_2op_SrcV_nobyte("btc", c->src, c->dst, _eflags);
1618 break;
1619 case 0xbe ... 0xbf: /* movsx */
1620 c->dst.bytes = c->op_bytes;
1621 c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
1622 (s16) c->src.val;
1623 break;
1624 case 0xc3: /* movnti */
1625 c->dst.bytes = c->op_bytes;
1626 c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
1627 (u64) c->src.val;
1628 break;
1629 }
1630 goto writeback;
1631
1632 twobyte_special_insn:
1633 /* Disable writeback. */
1634 no_wb = 1;
1635 switch (c->b) {
1636 case 0x06:
1637 emulate_clts(ctxt->vcpu);
1638 break;
1639 case 0x08: /* invd */
1640 break;
1641 case 0x09: /* wbinvd */
1642 break;
1643 case 0x0d: /* GrpP (prefetch) */
1644 case 0x18: /* Grp16 (prefetch/nop) */
1645 break;
1646 case 0x20: /* mov cr, reg */
1647 if (c->modrm_mod != 3)
1648 goto cannot_emulate;
1649 c->regs[c->modrm_rm] =
1650 realmode_get_cr(ctxt->vcpu, c->modrm_reg);
1651 break;
1652 case 0x22: /* mov reg, cr */
1653 if (c->modrm_mod != 3)
1654 goto cannot_emulate;
1655 realmode_set_cr(ctxt->vcpu,
1656 c->modrm_reg, c->modrm_val, &_eflags);
1657 break;
1658 case 0x30:
1659 /* wrmsr */
1660 msr_data = (u32)c->regs[VCPU_REGS_RAX]
1661 | ((u64)c->regs[VCPU_REGS_RDX] << 32);
1662 rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
1663 if (rc) {
1664 kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
1665 c->eip = ctxt->vcpu->rip;
1666 }
1667 rc = X86EMUL_CONTINUE;
1668 break;
1669 case 0x32:
1670 /* rdmsr */
1671 rc = kvm_get_msr(ctxt->vcpu,
1672 c->regs[VCPU_REGS_RCX], &msr_data);
1673 if (rc) {
1674 kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
1675 c->eip = ctxt->vcpu->rip;
1676 } else {
1677 c->regs[VCPU_REGS_RAX] = (u32)msr_data;
1678 c->regs[VCPU_REGS_RDX] = msr_data >> 32;
1679 }
1680 rc = X86EMUL_CONTINUE;
1681 break;
1682 case 0x80 ... 0x8f: /* jnz rel, etc*/ {
1683 long int rel;
1684
1685 switch (c->op_bytes) {
1686 case 2:
1687 rel = insn_fetch(s16, 2, c->eip);
1688 break;
1689 case 4:
1690 rel = insn_fetch(s32, 4, c->eip);
1691 break;
1692 case 8:
1693 rel = insn_fetch(s64, 8, c->eip);
1694 break;
1695 default:
1696 DPRINTF("jnz: Invalid op_bytes\n");
1697 goto cannot_emulate;
1698 }
1699 if (test_cc(c->b, _eflags))
1700 JMP_REL(rel);
1701 break;
1702 }
1703 case 0xc7: /* Grp9 (cmpxchg8b) */
1704 {
1705 u64 old, new;
1706 if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
1707 != 0)
1708 goto done;
1709 if (((u32) (old >> 0) !=
1710 (u32) c->regs[VCPU_REGS_RAX]) ||
1711 ((u32) (old >> 32) !=
1712 (u32) c->regs[VCPU_REGS_RDX])) {
1713 c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1714 c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1715 _eflags &= ~EFLG_ZF;
1716 } else {
1717 new = ((u64)c->regs[VCPU_REGS_RCX] << 32)
1718 | (u32) c->regs[VCPU_REGS_RBX];
1719 if ((rc = ops->cmpxchg_emulated(cr2, &old,
1720 &new, 8, ctxt->vcpu)) != 0)
1721 goto done;
1722 _eflags |= EFLG_ZF;
1723 }
1724 break;
1725 }
1726 }
1727 goto writeback;
1728
1729 cannot_emulate:
1730 DPRINTF("Cannot emulate %02x\n", c->b);
1731 c->eip = saved_eip;
1732 return -1;
1733 }