VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 11982

Last change on this file since 11982 was 11982, checked in by vboxsync, 17 years ago

All: license header changes for 2.0 (OSE headers, add Sun GPL/LGPL disclaimer)

  • Property svn:eol-style set to native
File size: 206.0 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#include <inttypes.h>
34#ifndef VBOX
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42
43/* XXX: move that elsewhere */
44static uint16_t *gen_opc_ptr;
45static uint32_t *gen_opparam_ptr;
46
47#define PREFIX_REPZ 0x01
48#define PREFIX_REPNZ 0x02
49#define PREFIX_LOCK 0x04
50#define PREFIX_DATA 0x08
51#define PREFIX_ADR 0x10
52
53#ifdef TARGET_X86_64
54#define X86_64_ONLY(x) x
55#define X86_64_DEF(x...) x
56#define CODE64(s) ((s)->code64)
57#define REX_X(s) ((s)->rex_x)
58#define REX_B(s) ((s)->rex_b)
59/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
60#if 1
61#define BUGGY_64(x) NULL
62#endif
63#else
64#define X86_64_ONLY(x) NULL
65#define X86_64_DEF(x...)
66#define CODE64(s) 0
67#define REX_X(s) 0
68#define REX_B(s) 0
69#endif
70
71#ifdef TARGET_X86_64
72static int x86_64_hregs;
73#endif
74
75#ifdef USE_DIRECT_JUMP
76#define TBPARAM(x)
77#else
78#define TBPARAM(x) (long)(x)
79#endif
80
81#ifdef VBOX
82/* Special/override code readers to hide patched code. */
83
84uint8_t ldub_code_raw(target_ulong pc)
85{
86 uint8_t b;
87
88 if (!remR3GetOpcode(cpu_single_env, pc, &b))
89 b = ldub_code(pc);
90 return b;
91}
92#define ldub_code(a) ldub_code_raw(a)
93
94uint16_t lduw_code_raw(target_ulong pc)
95{
96 return (ldub_code(pc+1) << 8) | ldub_code(pc);
97}
98#define lduw_code(a) lduw_code_raw(a)
99
100
101uint32_t ldl_code_raw(target_ulong pc)
102{
103 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
104}
105#define ldl_code(a) ldl_code_raw(a)
106
107#endif /* VBOX */
108
109
/* State carried while translating one guest basic block.  The "current
 * insn" fields are rewritten for every decoded instruction; the "current
 * block" fields stay fixed for the lifetime of the translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current instruction */
    int aflag, dflag; /* address / operand size; 2 = 64 bit (see aflag == 2 checks) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* index/base register extension bits (REX prefix) */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (lazy flags state) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (offset into ld/st tables) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* cached CPUID feature bits */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
151
152static void gen_eob(DisasContext *s);
153static void gen_jmp(DisasContext *s, target_ulong eip);
154static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
155
156/* i386 arith/logic operations */
157enum {
158 OP_ADDL,
159 OP_ORL,
160 OP_ADCL,
161 OP_SBBL,
162 OP_ANDL,
163 OP_SUBL,
164 OP_XORL,
165 OP_CMPL,
166};
167
168/* i386 shift ops */
169enum {
170 OP_ROL,
171 OP_ROR,
172 OP_RCL,
173 OP_RCR,
174 OP_SHL,
175 OP_SHR,
176 OP_SHL1, /* undocumented */
177 OP_SAR = 7,
178};
179
180enum {
181#define DEF(s, n, copy_size) INDEX_op_ ## s,
182#include "opc.h"
183#undef DEF
184 NB_OPS,
185};
186
187#include "gen-op.h"
188
189/* operand size */
190enum {
191 OT_BYTE = 0,
192 OT_WORD,
193 OT_LONG,
194 OT_QUAD,
195};
196
197enum {
198 /* I386 int registers */
199 OR_EAX, /* MUST be even numbered */
200 OR_ECX,
201 OR_EDX,
202 OR_EBX,
203 OR_ESP,
204 OR_EBP,
205 OR_ESI,
206 OR_EDI,
207
208 OR_TMP0 = 16, /* temporary operand register */
209 OR_TMP1,
210 OR_A0, /* temporary register used when doing address evaluation */
211};
212
213#ifdef TARGET_X86_64
214
215#define NB_OP_SIZES 4
216
217#define DEF_REGS(prefix, suffix) \
218 prefix ## EAX ## suffix,\
219 prefix ## ECX ## suffix,\
220 prefix ## EDX ## suffix,\
221 prefix ## EBX ## suffix,\
222 prefix ## ESP ## suffix,\
223 prefix ## EBP ## suffix,\
224 prefix ## ESI ## suffix,\
225 prefix ## EDI ## suffix,\
226 prefix ## R8 ## suffix,\
227 prefix ## R9 ## suffix,\
228 prefix ## R10 ## suffix,\
229 prefix ## R11 ## suffix,\
230 prefix ## R12 ## suffix,\
231 prefix ## R13 ## suffix,\
232 prefix ## R14 ## suffix,\
233 prefix ## R15 ## suffix,
234
235#define DEF_BREGS(prefixb, prefixh, suffix) \
236 \
237static void prefixb ## ESP ## suffix ## _wrapper(void) \
238{ \
239 if (x86_64_hregs) \
240 prefixb ## ESP ## suffix (); \
241 else \
242 prefixh ## EAX ## suffix (); \
243} \
244 \
245static void prefixb ## EBP ## suffix ## _wrapper(void) \
246{ \
247 if (x86_64_hregs) \
248 prefixb ## EBP ## suffix (); \
249 else \
250 prefixh ## ECX ## suffix (); \
251} \
252 \
253static void prefixb ## ESI ## suffix ## _wrapper(void) \
254{ \
255 if (x86_64_hregs) \
256 prefixb ## ESI ## suffix (); \
257 else \
258 prefixh ## EDX ## suffix (); \
259} \
260 \
261static void prefixb ## EDI ## suffix ## _wrapper(void) \
262{ \
263 if (x86_64_hregs) \
264 prefixb ## EDI ## suffix (); \
265 else \
266 prefixh ## EBX ## suffix (); \
267}
268
269DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
270DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
271DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
272DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
273
274#else /* !TARGET_X86_64 */
275
276#define NB_OP_SIZES 3
277
278#define DEF_REGS(prefix, suffix) \
279 prefix ## EAX ## suffix,\
280 prefix ## ECX ## suffix,\
281 prefix ## EDX ## suffix,\
282 prefix ## EBX ## suffix,\
283 prefix ## ESP ## suffix,\
284 prefix ## EBP ## suffix,\
285 prefix ## ESI ## suffix,\
286 prefix ## EDI ## suffix,
287
288#endif /* !TARGET_X86_64 */
289
290static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
291 [OT_BYTE] = {
292 gen_op_movb_EAX_T0,
293 gen_op_movb_ECX_T0,
294 gen_op_movb_EDX_T0,
295 gen_op_movb_EBX_T0,
296#ifdef TARGET_X86_64
297 gen_op_movb_ESP_T0_wrapper,
298 gen_op_movb_EBP_T0_wrapper,
299 gen_op_movb_ESI_T0_wrapper,
300 gen_op_movb_EDI_T0_wrapper,
301 gen_op_movb_R8_T0,
302 gen_op_movb_R9_T0,
303 gen_op_movb_R10_T0,
304 gen_op_movb_R11_T0,
305 gen_op_movb_R12_T0,
306 gen_op_movb_R13_T0,
307 gen_op_movb_R14_T0,
308 gen_op_movb_R15_T0,
309#else
310 gen_op_movh_EAX_T0,
311 gen_op_movh_ECX_T0,
312 gen_op_movh_EDX_T0,
313 gen_op_movh_EBX_T0,
314#endif
315 },
316 [OT_WORD] = {
317 DEF_REGS(gen_op_movw_, _T0)
318 },
319 [OT_LONG] = {
320 DEF_REGS(gen_op_movl_, _T0)
321 },
322#ifdef TARGET_X86_64
323 [OT_QUAD] = {
324 DEF_REGS(gen_op_movq_, _T0)
325 },
326#endif
327};
328
329static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
330 [OT_BYTE] = {
331 gen_op_movb_EAX_T1,
332 gen_op_movb_ECX_T1,
333 gen_op_movb_EDX_T1,
334 gen_op_movb_EBX_T1,
335#ifdef TARGET_X86_64
336 gen_op_movb_ESP_T1_wrapper,
337 gen_op_movb_EBP_T1_wrapper,
338 gen_op_movb_ESI_T1_wrapper,
339 gen_op_movb_EDI_T1_wrapper,
340 gen_op_movb_R8_T1,
341 gen_op_movb_R9_T1,
342 gen_op_movb_R10_T1,
343 gen_op_movb_R11_T1,
344 gen_op_movb_R12_T1,
345 gen_op_movb_R13_T1,
346 gen_op_movb_R14_T1,
347 gen_op_movb_R15_T1,
348#else
349 gen_op_movh_EAX_T1,
350 gen_op_movh_ECX_T1,
351 gen_op_movh_EDX_T1,
352 gen_op_movh_EBX_T1,
353#endif
354 },
355 [OT_WORD] = {
356 DEF_REGS(gen_op_movw_, _T1)
357 },
358 [OT_LONG] = {
359 DEF_REGS(gen_op_movl_, _T1)
360 },
361#ifdef TARGET_X86_64
362 [OT_QUAD] = {
363 DEF_REGS(gen_op_movq_, _T1)
364 },
365#endif
366};
367
368static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
369 [0] = {
370 DEF_REGS(gen_op_movw_, _A0)
371 },
372 [1] = {
373 DEF_REGS(gen_op_movl_, _A0)
374 },
375#ifdef TARGET_X86_64
376 [2] = {
377 DEF_REGS(gen_op_movq_, _A0)
378 },
379#endif
380};
381
382static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
383{
384 [OT_BYTE] = {
385 {
386 gen_op_movl_T0_EAX,
387 gen_op_movl_T0_ECX,
388 gen_op_movl_T0_EDX,
389 gen_op_movl_T0_EBX,
390#ifdef TARGET_X86_64
391 gen_op_movl_T0_ESP_wrapper,
392 gen_op_movl_T0_EBP_wrapper,
393 gen_op_movl_T0_ESI_wrapper,
394 gen_op_movl_T0_EDI_wrapper,
395 gen_op_movl_T0_R8,
396 gen_op_movl_T0_R9,
397 gen_op_movl_T0_R10,
398 gen_op_movl_T0_R11,
399 gen_op_movl_T0_R12,
400 gen_op_movl_T0_R13,
401 gen_op_movl_T0_R14,
402 gen_op_movl_T0_R15,
403#else
404 gen_op_movh_T0_EAX,
405 gen_op_movh_T0_ECX,
406 gen_op_movh_T0_EDX,
407 gen_op_movh_T0_EBX,
408#endif
409 },
410 {
411 gen_op_movl_T1_EAX,
412 gen_op_movl_T1_ECX,
413 gen_op_movl_T1_EDX,
414 gen_op_movl_T1_EBX,
415#ifdef TARGET_X86_64
416 gen_op_movl_T1_ESP_wrapper,
417 gen_op_movl_T1_EBP_wrapper,
418 gen_op_movl_T1_ESI_wrapper,
419 gen_op_movl_T1_EDI_wrapper,
420 gen_op_movl_T1_R8,
421 gen_op_movl_T1_R9,
422 gen_op_movl_T1_R10,
423 gen_op_movl_T1_R11,
424 gen_op_movl_T1_R12,
425 gen_op_movl_T1_R13,
426 gen_op_movl_T1_R14,
427 gen_op_movl_T1_R15,
428#else
429 gen_op_movh_T1_EAX,
430 gen_op_movh_T1_ECX,
431 gen_op_movh_T1_EDX,
432 gen_op_movh_T1_EBX,
433#endif
434 },
435 },
436 [OT_WORD] = {
437 {
438 DEF_REGS(gen_op_movl_T0_, )
439 },
440 {
441 DEF_REGS(gen_op_movl_T1_, )
442 },
443 },
444 [OT_LONG] = {
445 {
446 DEF_REGS(gen_op_movl_T0_, )
447 },
448 {
449 DEF_REGS(gen_op_movl_T1_, )
450 },
451 },
452#ifdef TARGET_X86_64
453 [OT_QUAD] = {
454 {
455 DEF_REGS(gen_op_movl_T0_, )
456 },
457 {
458 DEF_REGS(gen_op_movl_T1_, )
459 },
460 },
461#endif
462};
463
464static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
465 DEF_REGS(gen_op_movl_A0_, )
466};
467
468static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
469 [0] = {
470 DEF_REGS(gen_op_addl_A0_, )
471 },
472 [1] = {
473 DEF_REGS(gen_op_addl_A0_, _s1)
474 },
475 [2] = {
476 DEF_REGS(gen_op_addl_A0_, _s2)
477 },
478 [3] = {
479 DEF_REGS(gen_op_addl_A0_, _s3)
480 },
481};
482
483#ifdef TARGET_X86_64
484static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
485 DEF_REGS(gen_op_movq_A0_, )
486};
487
488static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
489 [0] = {
490 DEF_REGS(gen_op_addq_A0_, )
491 },
492 [1] = {
493 DEF_REGS(gen_op_addq_A0_, _s1)
494 },
495 [2] = {
496 DEF_REGS(gen_op_addq_A0_, _s2)
497 },
498 [3] = {
499 DEF_REGS(gen_op_addq_A0_, _s3)
500 },
501};
502#endif
503
504static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
505 [0] = {
506 DEF_REGS(gen_op_cmovw_, _T1_T0)
507 },
508 [1] = {
509 DEF_REGS(gen_op_cmovl_, _T1_T0)
510 },
511#ifdef TARGET_X86_64
512 [2] = {
513 DEF_REGS(gen_op_cmovq_, _T1_T0)
514 },
515#endif
516};
517
/* Logic ops indexed by OP_* (see enum above).  The NULL slots (add/adc/
 * sbb/sub/cmp) are never consulted: gen_op() handles those opcodes in
 * dedicated switch cases before falling through to this table. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,              /* OP_ADDL - handled inline in gen_op() */
    gen_op_orl_T0_T1,  /* OP_ORL */
    NULL,              /* OP_ADCL */
    NULL,              /* OP_SBBL */
    gen_op_andl_T0_T1, /* OP_ANDL */
    NULL,              /* OP_SUBL */
    gen_op_xorl_T0_T1, /* OP_XORL */
    NULL,              /* OP_CMPL */
};
528
529#define DEF_ARITHC(SUFFIX)\
530 {\
531 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
532 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
533 },\
534 {\
535 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
536 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
537 },\
538 {\
539 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
540 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
541 },\
542 {\
543 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
544 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
545 },
546
547static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
548 DEF_ARITHC( )
549};
550
551static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
552 DEF_ARITHC(_raw)
553#ifndef CONFIG_USER_ONLY
554 DEF_ARITHC(_kernel)
555 DEF_ARITHC(_user)
556#endif
557};
558
/* Lazy-flags CC_OP_* value produced by each OP_* arith op at byte size
 * (larger operand sizes add the OT_* offset, cf. "CC_OP_SUBB + ot"). */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
569
570#define DEF_CMPXCHG(SUFFIX)\
571 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
572 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
573 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
574 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
575
576static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
577 DEF_CMPXCHG( )
578};
579
580static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
581 DEF_CMPXCHG(_raw)
582#ifndef CONFIG_USER_ONLY
583 DEF_CMPXCHG(_kernel)
584 DEF_CMPXCHG(_user)
585#endif
586};
587
588#define DEF_SHIFT(SUFFIX)\
589 {\
590 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
591 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
592 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
593 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
594 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
595 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
596 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
597 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
598 },\
599 {\
600 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
601 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
602 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
603 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
604 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
605 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
606 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
607 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
608 },\
609 {\
610 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
611 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
612 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
613 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
614 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
615 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
616 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
617 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
618 },\
619 {\
620 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
621 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
622 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
623 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
624 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
625 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
626 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
627 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
628 },
629
630static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
631 DEF_SHIFT( )
632};
633
634static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
635 DEF_SHIFT(_raw)
636#ifndef CONFIG_USER_ONLY
637 DEF_SHIFT(_kernel)
638 DEF_SHIFT(_user)
639#endif
640};
641
642#define DEF_SHIFTD(SUFFIX, op)\
643 {\
644 NULL,\
645 NULL,\
646 },\
647 {\
648 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
649 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
650 },\
651 {\
652 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
653 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
654 },\
655 {\
656X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
657 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
658 },
659
660static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
661 DEF_SHIFTD(, im)
662};
663
664static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
665 DEF_SHIFTD(, ECX)
666};
667
668static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
669 DEF_SHIFTD(_raw, im)
670#ifndef CONFIG_USER_ONLY
671 DEF_SHIFTD(_kernel, im)
672 DEF_SHIFTD(_user, im)
673#endif
674};
675
676static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
677 DEF_SHIFTD(_raw, ECX)
678#ifndef CONFIG_USER_ONLY
679 DEF_SHIFTD(_kernel, ECX)
680 DEF_SHIFTD(_user, ECX)
681#endif
682};
683
684static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
685 [0] = {
686 gen_op_btw_T0_T1_cc,
687 gen_op_btsw_T0_T1_cc,
688 gen_op_btrw_T0_T1_cc,
689 gen_op_btcw_T0_T1_cc,
690 },
691 [1] = {
692 gen_op_btl_T0_T1_cc,
693 gen_op_btsl_T0_T1_cc,
694 gen_op_btrl_T0_T1_cc,
695 gen_op_btcl_T0_T1_cc,
696 },
697#ifdef TARGET_X86_64
698 [2] = {
699 gen_op_btq_T0_T1_cc,
700 gen_op_btsq_T0_T1_cc,
701 gen_op_btrq_T0_T1_cc,
702 gen_op_btcq_T0_T1_cc,
703 },
704#endif
705};
706
707static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
708 gen_op_add_bitw_A0_T1,
709 gen_op_add_bitl_A0_T1,
710 X86_64_ONLY(gen_op_add_bitq_A0_T1),
711};
712
713static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
714 [0] = {
715 gen_op_bsfw_T0_cc,
716 gen_op_bsrw_T0_cc,
717 },
718 [1] = {
719 gen_op_bsfl_T0_cc,
720 gen_op_bsrl_T0_cc,
721 },
722#ifdef TARGET_X86_64
723 [2] = {
724 gen_op_bsfq_T0_cc,
725 gen_op_bsrq_T0_cc,
726 },
727#endif
728};
729
730static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
731 gen_op_ldsb_raw_T0_A0,
732 gen_op_ldsw_raw_T0_A0,
733 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
734 NULL,
735#ifndef CONFIG_USER_ONLY
736 gen_op_ldsb_kernel_T0_A0,
737 gen_op_ldsw_kernel_T0_A0,
738 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
739 NULL,
740
741 gen_op_ldsb_user_T0_A0,
742 gen_op_ldsw_user_T0_A0,
743 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
744 NULL,
745#endif
746};
747
748static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
749 gen_op_ldub_raw_T0_A0,
750 gen_op_lduw_raw_T0_A0,
751 NULL,
752 NULL,
753
754#ifndef CONFIG_USER_ONLY
755 gen_op_ldub_kernel_T0_A0,
756 gen_op_lduw_kernel_T0_A0,
757 NULL,
758 NULL,
759
760 gen_op_ldub_user_T0_A0,
761 gen_op_lduw_user_T0_A0,
762 NULL,
763 NULL,
764#endif
765};
766
767/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
768static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
769 gen_op_ldub_raw_T0_A0,
770 gen_op_lduw_raw_T0_A0,
771 gen_op_ldl_raw_T0_A0,
772 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
773
774#ifndef CONFIG_USER_ONLY
775 gen_op_ldub_kernel_T0_A0,
776 gen_op_lduw_kernel_T0_A0,
777 gen_op_ldl_kernel_T0_A0,
778 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
779
780 gen_op_ldub_user_T0_A0,
781 gen_op_lduw_user_T0_A0,
782 gen_op_ldl_user_T0_A0,
783 X86_64_ONLY(gen_op_ldq_user_T0_A0),
784#endif
785};
786
787static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
788 gen_op_ldub_raw_T1_A0,
789 gen_op_lduw_raw_T1_A0,
790 gen_op_ldl_raw_T1_A0,
791 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
792
793#ifndef CONFIG_USER_ONLY
794 gen_op_ldub_kernel_T1_A0,
795 gen_op_lduw_kernel_T1_A0,
796 gen_op_ldl_kernel_T1_A0,
797 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
798
799 gen_op_ldub_user_T1_A0,
800 gen_op_lduw_user_T1_A0,
801 gen_op_ldl_user_T1_A0,
802 X86_64_ONLY(gen_op_ldq_user_T1_A0),
803#endif
804};
805
806static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
807 gen_op_stb_raw_T0_A0,
808 gen_op_stw_raw_T0_A0,
809 gen_op_stl_raw_T0_A0,
810 X86_64_ONLY(gen_op_stq_raw_T0_A0),
811
812#ifndef CONFIG_USER_ONLY
813 gen_op_stb_kernel_T0_A0,
814 gen_op_stw_kernel_T0_A0,
815 gen_op_stl_kernel_T0_A0,
816 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
817
818 gen_op_stb_user_T0_A0,
819 gen_op_stw_user_T0_A0,
820 gen_op_stl_user_T0_A0,
821 X86_64_ONLY(gen_op_stq_user_T0_A0),
822#endif
823};
824
825static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
826 NULL,
827 gen_op_stw_raw_T1_A0,
828 gen_op_stl_raw_T1_A0,
829 X86_64_ONLY(gen_op_stq_raw_T1_A0),
830
831#ifndef CONFIG_USER_ONLY
832 NULL,
833 gen_op_stw_kernel_T1_A0,
834 gen_op_stl_kernel_T1_A0,
835 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
836
837 NULL,
838 gen_op_stw_user_T1_A0,
839 gen_op_stl_user_T1_A0,
840 X86_64_ONLY(gen_op_stq_user_T1_A0),
841#endif
842};
843
844#ifdef VBOX
/* Emit the VBox op that checks for pending external events.
 * Thin wrapper so callers do not depend on gen_op_* directly.
 * Fix: declared with (void) — an empty () parameter list in C is an
 * old-style declaration with unspecified arguments, not a prototype. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
849
/* Store the immediate guest PC into EIP/RIP, picking the shortest
 * immediate encoding.  Unlike gen_jmp_im() below this emits no VBox
 * external-event check — VBox-specific callers use it to update only
 * the PC. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value survives zero-extension: plain 32-bit move suffices */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value survives sign-extension: 32-bit immediate into 64-bit RIP */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
864
865#endif /* VBOX */
866
/* Store the immediate guest PC into EIP/RIP with the shortest immediate
 * encoding.  Under VBOX this first emits a check for pending external
 * events (the body otherwise matches gen_update_eip()). */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value survives zero-extension: plain 32-bit move suffices */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value survives sign-extension: 32-bit immediate into 64-bit RIP */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
884
/* Load the effective address of the string *source* operand into A0:
 * xSI plus the active segment base.  Segment override prefixes are
 * honoured; DS is the default where a base must be added. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override; /* -1 if no segment override prefix */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit addressing: segment base only added on explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS; /* non-zero segment bases in use: must add DS */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 bit address: segment base is always added */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff(); /* truncate SI to 16 bits */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
919
/* Load the effective address of the string *destination* operand into A0:
 * xDI plus the ES base.  The destination segment is always ES here — no
 * override is consulted. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit addressing: no segment base added */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            /* non-zero segment bases in use: add the ES base */
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16-bit addressing: truncate DI and always add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
940
941static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
942 gen_op_movl_T0_Dshiftb,
943 gen_op_movl_T0_Dshiftw,
944 gen_op_movl_T0_Dshiftl,
945 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
946};
947
948static GenOpFunc1 *gen_op_jnz_ecx[3] = {
949 gen_op_jnz_ecxw,
950 gen_op_jnz_ecxl,
951 X86_64_ONLY(gen_op_jnz_ecxq),
952};
953
954static GenOpFunc1 *gen_op_jz_ecx[3] = {
955 gen_op_jz_ecxw,
956 gen_op_jz_ecxl,
957 X86_64_ONLY(gen_op_jz_ecxq),
958};
959
960static GenOpFunc *gen_op_dec_ECX[3] = {
961 gen_op_decw_ECX,
962 gen_op_decl_ECX,
963 X86_64_ONLY(gen_op_decq_ECX),
964};
965
966static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
967 {
968 gen_op_jnz_subb,
969 gen_op_jnz_subw,
970 gen_op_jnz_subl,
971 X86_64_ONLY(gen_op_jnz_subq),
972 },
973 {
974 gen_op_jz_subb,
975 gen_op_jz_subw,
976 gen_op_jz_subl,
977 X86_64_ONLY(gen_op_jz_subq),
978 },
979};
980
981static GenOpFunc *gen_op_in_DX_T0[3] = {
982 gen_op_inb_DX_T0,
983 gen_op_inw_DX_T0,
984 gen_op_inl_DX_T0,
985};
986
987static GenOpFunc *gen_op_out_DX_T0[3] = {
988 gen_op_outb_DX_T0,
989 gen_op_outw_DX_T0,
990 gen_op_outl_DX_T0,
991};
992
993static GenOpFunc *gen_op_in[3] = {
994 gen_op_inb_T0_T1,
995 gen_op_inw_T0_T1,
996 gen_op_inl_T0_T1,
997};
998
999static GenOpFunc *gen_op_out[3] = {
1000 gen_op_outb_T0_T1,
1001 gen_op_outw_T0_T1,
1002 gen_op_outl_T0_T1,
1003};
1004
1005static GenOpFunc *gen_check_io_T0[3] = {
1006 gen_op_check_iob_T0,
1007 gen_op_check_iow_T0,
1008 gen_op_check_iol_T0,
1009};
1010
1011static GenOpFunc *gen_check_io_DX[3] = {
1012 gen_op_check_iob_DX,
1013 gen_op_check_iow_DX,
1014 gen_op_check_iol_DX,
1015};
1016
1017static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1018{
1019 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1020 if (s->cc_op != CC_OP_DYNAMIC)
1021 gen_op_set_cc_op(s->cc_op);
1022 gen_jmp_im(cur_eip);
1023 if (use_dx)
1024 gen_check_io_DX[ot]();
1025 else
1026 gen_check_io_T0[ot]();
1027 }
1028}
1029
/* Generate one MOVS iteration: copy an element of size 'ot' from the
 * source string at xSI to the destination at ES:xDI, then advance both
 * index registers by the per-iteration delta. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    /* T0 = +/- element size (sign presumably follows DF — Dshift op) */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1051
1052static inline void gen_update_cc_op(DisasContext *s)
1053{
1054 if (s->cc_op != CC_OP_DYNAMIC) {
1055 gen_op_set_cc_op(s->cc_op);
1056 s->cc_op = CC_OP_DYNAMIC;
1057 }
1058}
1059
1060/* XXX: does not work with gdbstub "ice" single step - not a
1061 serious problem */
/* Emit the xCX == 0 test terminating a REP-prefixed string instruction.
 * Falls through (label l1) when xCX != 0 so one iteration runs; when
 * xCX == 0 control reaches l2, which chains to the next instruction at
 * next_eip.  Returns l2 so callers can also jump there to exit the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1); /* xCX != 0: run (another) iteration */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);   /* xCX == 0: resume after the insn */
    gen_set_label(l1);
    return l2;
}
1074
/* Generate one STOS iteration: store xAX (element size 'ot') to ES:xDI,
 * then advance xDI by the per-iteration delta. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 = xAX */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1092
/* Generate one LODS iteration: load an element of size 'ot' from the
 * source string at xSI into xAX, then advance xSI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1110
/* Generate one SCAS iteration: compare xAX with the element at ES:xDI
 * (the cmp op records the flag inputs; the caller sets the CC_OP, cf.
 * GEN_REPZ2), then advance xDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 = xAX */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1129
/* Generate one CMPS iteration: compare the element at xSI with the one at
 * ES:xDI (the cmp op records the flag inputs; caller sets the CC_OP),
 * then advance both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1152
/* Generate one INS iteration: read a value of size 'ot' from port DX,
 * store it at ES:xDI, then advance xDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* NOTE(review): a dummy 0 is stored before the port read — presumably
       so any write page fault is taken before the I/O access happens;
       confirm against the memory-fault handling. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1172
/* Generate one OUTS iteration: load an element of size 'ot' from the
 * source string at xSI, write it to port DX, then advance xSI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot](); /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1190
1191/* same method as Valgrind : we generate jumps to current or next
1192 instruction */
1193#define GEN_REPZ(op) \
1194static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1195 target_ulong cur_eip, target_ulong next_eip) \
1196{ \
1197 int l2;\
1198 gen_update_cc_op(s); \
1199 l2 = gen_jz_ecx_string(s, next_eip); \
1200 gen_ ## op(s, ot); \
1201 gen_op_dec_ECX[s->aflag](); \
1202 /* a loop would cause two single step exceptions if ECX = 1 \
1203 before rep string_insn */ \
1204 if (!s->jmp_opt) \
1205 gen_op_jz_ecx[s->aflag](l2); \
1206 gen_jmp(s, cur_eip); \
1207}
1208
1209#define GEN_REPZ2(op) \
1210static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1211 target_ulong cur_eip, \
1212 target_ulong next_eip, \
1213 int nz) \
1214{ \
1215 int l2;\
1216 gen_update_cc_op(s); \
1217 l2 = gen_jz_ecx_string(s, next_eip); \
1218 gen_ ## op(s, ot); \
1219 gen_op_dec_ECX[s->aflag](); \
1220 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1221 gen_op_string_jnz_sub[nz][ot](l2);\
1222 if (!s->jmp_opt) \
1223 gen_op_jz_ecx[s->aflag](l2); \
1224 gen_jmp(s, cur_eip); \
1225}
1226
1227GEN_REPZ(movs)
1228GEN_REPZ(stos)
1229GEN_REPZ(lods)
1230GEN_REPZ(ins)
1231GEN_REPZ(outs)
1232GEN_REPZ2(scas)
1233GEN_REPZ2(cmps)
1234
1235enum {
1236 JCC_O,
1237 JCC_B,
1238 JCC_Z,
1239 JCC_BE,
1240 JCC_S,
1241 JCC_P,
1242 JCC_L,
1243 JCC_LE,
1244};
1245
1246static GenOpFunc1 *gen_jcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_jb_subb,
1250 gen_op_jz_subb,
1251 gen_op_jbe_subb,
1252 gen_op_js_subb,
1253 NULL,
1254 gen_op_jl_subb,
1255 gen_op_jle_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_jb_subw,
1260 gen_op_jz_subw,
1261 gen_op_jbe_subw,
1262 gen_op_js_subw,
1263 NULL,
1264 gen_op_jl_subw,
1265 gen_op_jle_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_jb_subl,
1270 gen_op_jz_subl,
1271 gen_op_jbe_subl,
1272 gen_op_js_subl,
1273 NULL,
1274 gen_op_jl_subl,
1275 gen_op_jle_subl,
1276 },
1277#ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 BUGGY_64(gen_op_jb_subq),
1281 gen_op_jz_subq,
1282 BUGGY_64(gen_op_jbe_subq),
1283 gen_op_js_subq,
1284 NULL,
1285 BUGGY_64(gen_op_jl_subq),
1286 BUGGY_64(gen_op_jle_subq),
1287 },
1288#endif
1289};
1290static GenOpFunc1 *gen_op_loop[3][4] = {
1291 [0] = {
1292 gen_op_loopnzw,
1293 gen_op_loopzw,
1294 gen_op_jnz_ecxw,
1295 },
1296 [1] = {
1297 gen_op_loopnzl,
1298 gen_op_loopzl,
1299 gen_op_jnz_ecxl,
1300 },
1301#ifdef TARGET_X86_64
1302 [2] = {
1303 gen_op_loopnzq,
1304 gen_op_loopzq,
1305 gen_op_jnz_ecxq,
1306 },
1307#endif
1308};
1309
1310static GenOpFunc *gen_setcc_slow[8] = {
1311 gen_op_seto_T0_cc,
1312 gen_op_setb_T0_cc,
1313 gen_op_setz_T0_cc,
1314 gen_op_setbe_T0_cc,
1315 gen_op_sets_T0_cc,
1316 gen_op_setp_T0_cc,
1317 gen_op_setl_T0_cc,
1318 gen_op_setle_T0_cc,
1319};
1320
/* Fast SETcc helpers for the common cmp/sub case, indexed
   [operand size][jcc_op]. NULL slots (O and P conditions) cannot be
   derived from the subtraction result alone and fall back to the
   slow table. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1365
/* x87 ST0 <- ST0 op FT0, indexed by the arithmetic op field of the
   FPU opcode: fadd, fmul, fcom, fcomp (same compare op; the pop is
   handled by the caller), fsub, fsubr, fdiv, fdivr. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1376
/* x87 ST(i) <- ST(i) op ST0. NOTE the exception in "r" op ordering:
   for the STN destination forms the sub/subr and div/divr pairs are
   swapped relative to the ST0 table above (per the x86 encoding).
   Slots 2/3 (compares) are not register-destination ops. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1388
/* Emit code for a two-operand ALU instruction (ADD/OR/ADC/SBB/AND/
   SUB/XOR/CMP) of size 'ot'. The source is expected in T1; the
   destination is register 'd', or memory at A0 if d == OR_TMP0. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the incoming carry, so the lazy flags must
           be materialized before the operation */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        /* the op computed eflags itself: state is now dynamic */
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        /* logical ops share one dispatch table; only the result is
           needed for later flag evaluation */
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP computes flags only, no writeback */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1448
1449/* if d == OR_TMP0, it means memory operand (address in A0) */
1450static void gen_inc(DisasContext *s1, int ot, int d, int c)
1451{
1452 if (d != OR_TMP0)
1453 gen_op_mov_TN_reg[ot][0][d]();
1454 else
1455 gen_op_ld_T0_A0[ot + s1->mem_index]();
1456 if (s1->cc_op != CC_OP_DYNAMIC)
1457 gen_op_set_cc_op(s1->cc_op);
1458 if (c > 0) {
1459 gen_op_incl_T0();
1460 s1->cc_op = CC_OP_INCB + ot;
1461 } else {
1462 gen_op_decl_T0();
1463 s1->cc_op = CC_OP_DECB + ot;
1464 }
1465 if (d != OR_TMP0)
1466 gen_op_mov_reg_T0[ot][d]();
1467 else
1468 gen_op_st_T0_A0[ot + s1->mem_index]();
1469 gen_op_update_inc_cc();
1470}
1471
/* Emit code for a shift/rotate 'op' of size 'ot'. The shifted value
   is register 'd' or memory at A0 (d == OR_TMP0); the count comes
   from register 's' or is already in T1 (s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1492
/* Shift/rotate by an immediate count 'c': loads the count into T1 and
   reuses the generic register-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1499
/* Decode the ModRM (and optional SIB / displacement) bytes of a
   memory operand and emit code computing the effective address into
   A0, including any segment base. Advances s->pc past the consumed
   bytes. On return *reg_ptr is always OR_A0 and *offset_ptr is 0
   (the address is fully folded into A0). Must only be called for
   memory forms (mod != 3). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment override prefix forces the segment base to be added
       even when addseg optimization would skip it */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32-bit or 64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        /* rm == 4 selects a SIB byte */
        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only; in 64-bit mode
                   without SIB this is RIP-relative */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            /* sign-extended 8-bit displacement */
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    /* use the short immediate form when disp fits in
                       32 bits */
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            /* default segment is SS for EBP/ESP-based addresses,
               DS otherwise */
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing: fixed register combinations per rm */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            /* BP-based forms default to SS */
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1689
1690static void gen_nop_modrm(DisasContext *s, int modrm)
1691{
1692 int mod, rm, base, code;
1693
1694 mod = (modrm >> 6) & 3;
1695 if (mod == 3)
1696 return;
1697 rm = modrm & 7;
1698
1699 if (s->aflag) {
1700
1701 base = rm;
1702
1703 if (base == 4) {
1704 code = ldub_code(s->pc++);
1705 base = (code & 7);
1706 }
1707
1708 switch (mod) {
1709 case 0:
1710 if (base == 5) {
1711 s->pc += 4;
1712 }
1713 break;
1714 case 1:
1715 s->pc++;
1716 break;
1717 default:
1718 case 2:
1719 s->pc += 4;
1720 break;
1721 }
1722 } else {
1723 switch (mod) {
1724 case 0:
1725 if (rm == 6) {
1726 s->pc += 2;
1727 }
1728 break;
1729 case 1:
1730 s->pc++;
1731 break;
1732 default:
1733 case 2:
1734 s->pc += 2;
1735 break;
1736 }
1737 }
1738}
1739
1740/* used for LEA and MOV AX, mem */
1741static void gen_add_A0_ds_seg(DisasContext *s)
1742{
1743 int override, must_add_seg;
1744 must_add_seg = s->addseg;
1745 override = R_DS;
1746 if (s->override >= 0) {
1747 override = s->override;
1748 must_add_seg = 1;
1749 } else {
1750 override = R_DS;
1751 }
1752 if (must_add_seg) {
1753#ifdef TARGET_X86_64
1754 if (CODE64(s)) {
1755 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1756 } else
1757#endif
1758 {
1759 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1760 }
1761 }
1762}
1763
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For the register form (mod == 3) this is a reg<->reg move; for the
   memory form the effective address is computed into A0 first.
   is_store != 0 writes 'reg' to the operand, otherwise loads it. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1795
1796static inline uint32_t insn_get(DisasContext *s, int ot)
1797{
1798 uint32_t ret;
1799
1800 switch(ot) {
1801 case OT_BYTE:
1802 ret = ldub_code(s->pc);
1803 s->pc++;
1804 break;
1805 case OT_WORD:
1806 ret = lduw_code(s->pc);
1807 s->pc += 2;
1808 break;
1809 default:
1810 case OT_LONG:
1811 ret = ldl_code(s->pc);
1812 s->pc += 4;
1813 break;
1814 }
1815 return ret;
1816}
1817
1818static inline int insn_const_size(unsigned int ot)
1819{
1820 if (ot <= OT_LONG)
1821 return 1 << ot;
1822 else
1823 return 4;
1824}
1825
/* Emit a jump to 'eip', using direct TB chaining (slot 'tb_num')
   when the target lies on one of the pages this TB already covers;
   otherwise fall back to a full end-of-block exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the main loop can patch the link */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1850
1851static inline void gen_jcc(DisasContext *s, int b,
1852 target_ulong val, target_ulong next_eip)
1853{
1854 TranslationBlock *tb;
1855 int inv, jcc_op;
1856 GenOpFunc1 *func;
1857 target_ulong tmp;
1858 int l1, l2;
1859
1860 inv = b & 1;
1861 jcc_op = (b >> 1) & 7;
1862
1863 if (s->jmp_opt) {
1864#ifdef VBOX
1865 gen_check_external_event(s);
1866#endif /* VBOX */
1867 switch(s->cc_op) {
1868 /* we optimize the cmp/jcc case */
1869 case CC_OP_SUBB:
1870 case CC_OP_SUBW:
1871 case CC_OP_SUBL:
1872 case CC_OP_SUBQ:
1873 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1874 break;
1875
1876 /* some jumps are easy to compute */
1877 case CC_OP_ADDB:
1878 case CC_OP_ADDW:
1879 case CC_OP_ADDL:
1880 case CC_OP_ADDQ:
1881
1882 case CC_OP_ADCB:
1883 case CC_OP_ADCW:
1884 case CC_OP_ADCL:
1885 case CC_OP_ADCQ:
1886
1887 case CC_OP_SBBB:
1888 case CC_OP_SBBW:
1889 case CC_OP_SBBL:
1890 case CC_OP_SBBQ:
1891
1892 case CC_OP_LOGICB:
1893 case CC_OP_LOGICW:
1894 case CC_OP_LOGICL:
1895 case CC_OP_LOGICQ:
1896
1897 case CC_OP_INCB:
1898 case CC_OP_INCW:
1899 case CC_OP_INCL:
1900 case CC_OP_INCQ:
1901
1902 case CC_OP_DECB:
1903 case CC_OP_DECW:
1904 case CC_OP_DECL:
1905 case CC_OP_DECQ:
1906
1907 case CC_OP_SHLB:
1908 case CC_OP_SHLW:
1909 case CC_OP_SHLL:
1910 case CC_OP_SHLQ:
1911
1912 case CC_OP_SARB:
1913 case CC_OP_SARW:
1914 case CC_OP_SARL:
1915 case CC_OP_SARQ:
1916 switch(jcc_op) {
1917 case JCC_Z:
1918 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1919 break;
1920 case JCC_S:
1921 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1922 break;
1923 default:
1924 func = NULL;
1925 break;
1926 }
1927 break;
1928 default:
1929 func = NULL;
1930 break;
1931 }
1932
1933 if (s->cc_op != CC_OP_DYNAMIC) {
1934 gen_op_set_cc_op(s->cc_op);
1935 s->cc_op = CC_OP_DYNAMIC;
1936 }
1937
1938 if (!func) {
1939 gen_setcc_slow[jcc_op]();
1940 func = gen_op_jnz_T0_label;
1941 }
1942
1943 if (inv) {
1944 tmp = val;
1945 val = next_eip;
1946 next_eip = tmp;
1947 }
1948 tb = s->tb;
1949
1950 l1 = gen_new_label();
1951 func(l1);
1952
1953 gen_goto_tb(s, 0, next_eip);
1954
1955 gen_set_label(l1);
1956 gen_goto_tb(s, 1, val);
1957
1958 s->is_jmp = 3;
1959 } else {
1960
1961 if (s->cc_op != CC_OP_DYNAMIC) {
1962 gen_op_set_cc_op(s->cc_op);
1963 s->cc_op = CC_OP_DYNAMIC;
1964 }
1965 gen_setcc_slow[jcc_op]();
1966 if (inv) {
1967 tmp = val;
1968 val = next_eip;
1969 next_eip = tmp;
1970 }
1971 l1 = gen_new_label();
1972 l2 = gen_new_label();
1973 gen_op_jnz_T0_label(l1);
1974 gen_jmp_im(next_eip);
1975 gen_op_jmp_label(l2);
1976 gen_set_label(l1);
1977 gen_jmp_im(val);
1978 gen_set_label(l2);
1979 gen_eob(s);
1980 }
1981}
1982
/* Emit code computing condition 'b' into T0 (0 or 1), used by SETcc
   and CMOVcc. Uses the fast per-size helpers for the cmp/sub case
   and for Z/S after simple arithmetic; otherwise falls back to the
   generic flag evaluation. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    /* odd condition codes are the negated forms */
    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            /* ZF/SF can be read directly from the stored result;
               (cc_op - CC_OP_ADDB) % 4 selects the size row */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    /* negated condition: invert the computed bit */
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2049
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load can fault, so EIP must be exact */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple selector load, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2072
/* Add 'addend' to the stack pointer with the correct width (16/32/64
   bits), using the specialized ops for the common +2/+4/+8 cases. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        /* 16-bit stack: only SP is modified */
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2099
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes T0 onto the stack. ESP is only updated after the store so a
   faulting store leaves the stack pointer unchanged (precise
   exceptions). */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: push is 8 bytes, or 2 with a 16-bit operand */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented value in T1 for the ESP update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2139
2140/* generate a push. It depends on ss32, addseg and dflag */
2141/* slower version for T1, only used for call Ev */
2142static void gen_push_T1(DisasContext *s)
2143{
2144#ifdef TARGET_X86_64
2145 if (CODE64(s)) {
2146 gen_op_movq_A0_reg[R_ESP]();
2147 if (s->dflag) {
2148 gen_op_subq_A0_8();
2149 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2150 } else {
2151 gen_op_subq_A0_2();
2152 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2153 }
2154 gen_op_movq_ESP_A0();
2155 } else
2156#endif
2157 {
2158 gen_op_movl_A0_reg[R_ESP]();
2159 if (!s->dflag)
2160 gen_op_subl_A0_2();
2161 else
2162 gen_op_subl_A0_4();
2163 if (s->ss32) {
2164 if (s->addseg) {
2165 gen_op_addl_A0_SS();
2166 }
2167 } else {
2168 gen_op_andl_A0_ffff();
2169 gen_op_addl_A0_SS();
2170 }
2171 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2172
2173 if (s->ss32 && !s->addseg)
2174 gen_op_movl_ESP_A0();
2175 else
2176 gen_stack_update(s, (-2) << s->dflag);
2177 }
2178}
2179
/* two step pop is necessary for precise exceptions */
/* Loads the top of stack into T0 without adjusting ESP; the caller
   emits gen_pop_update() after the value has been safely consumed. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            /* 16-bit stack wraps at 64K before adding the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2201
/* Second half of a pop: advance the stack pointer by the operand
   size (8 in 64-bit mode unless a 16-bit operand was used). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2213
/* Compute the current stack address into A0 (with SS base if addseg),
   keeping the raw offset in T1 for a later ESP writeback. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2223
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store the 8 general registers (EAX..EDI, highest
   register index first) below the stack pointer, then commit the new
   ESP from T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2242
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload the 8 general registers from the stack (ESP's
   saved slot is skipped), then advance ESP by 16/32 via T1. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2264
/* ENTER: push EBP, optionally copy 'level' frame pointers (done by a
   helper op), set EBP to the new frame and reserve 'esp_addend'
   bytes of locals. 'level' is masked to 0..31 per the architecture. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2312
/* Raise exception 'trapno' at 'cur_eip': flags and EIP are committed
   first so the guest state is exact, then translation ends. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2321
/* an interrupt is different from an exception because of the
   privilege checks */
/* The instruction length (next_eip - cur_eip) is passed so the
   handler can compute the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2333
/* Emit a debug trap at 'cur_eip' (breakpoint handling): commit flags
   and EIP, enter the debug handler, end the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2342
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* clear the one-instruction interrupt inhibition (MOV SS/STI) */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single-step trace exception */
        gen_op_single_step();
    } else {
        /* T0 = 0 means "no chained TB" to the execution loop */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2362
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        /* direct TB chaining through link slot tb_num */
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2382
/* Unconditional jump to 'eip' using TB link slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2387
/* Load a target_ulong immediate into T0, using the shorter 32-bit
   form when the value sign-extends to itself. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2400
/* Load a target_ulong immediate into T1, using the shorter 32-bit
   form when the value sign-extends to itself. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2413
/* Add an immediate to A0 with the width appropriate to the current
   code size (64-bit add in long mode, 32-bit otherwise). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2423
/* 64-bit env-field load from memory at A0, indexed by mem_index
   (0 = raw, 1 = kernel, 2 = user MMU mode). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2431
/* 64-bit env-field store to memory at A0, indexed by mem_index. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2439
/* 128-bit (octaword/XMM) env-field load from memory at A0, indexed
   by mem_index. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2447
/* 128-bit (octaword/XMM) env-field store to memory at A0, indexed
   by mem_index. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2455
/* sentinel for SSE table entries that need special-case decoding in
   gen_sse() rather than a single generated op */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* build the {mmx, xmm} pair and the {ps, pd, ss, sd} quadruple used
   to populate sse_op_table1 rows */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2461
/* Main 0F-prefixed SSE/MMX dispatch table, indexed [opcode byte][b1]
   where b1 selects the mandatory prefix: 0 = none (MMX / ps),
   1 = 66 (pd), 2 = F3 (ss), 3 = F2 (sd). SSE_SPECIAL entries are
   decoded by hand in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* actual compare selected by imm8, see sse_op_table4 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2584
2585static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2586 [0 + 2] = MMX_OP2(psrlw),
2587 [0 + 4] = MMX_OP2(psraw),
2588 [0 + 6] = MMX_OP2(psllw),
2589 [8 + 2] = MMX_OP2(psrld),
2590 [8 + 4] = MMX_OP2(psrad),
2591 [8 + 6] = MMX_OP2(pslld),
2592 [16 + 2] = MMX_OP2(psrlq),
2593 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2594 [16 + 6] = MMX_OP2(psllq),
2595 [16 + 7] = { NULL, gen_op_pslldq_xmm },
2596};
2597
/* Scalar int<->float conversion helpers, laid out as three rows of four,
   matching the index arithmetic in gen_sse:
     row 0 (+0): cvtsi2ss/sd   (0F 2A with F3/F2 prefix)
     row 1 (+4): cvttss/sd2si  (0F 2C, truncating)
     row 2 (+8): cvtss/sd2si   (0F 2D, rounding)
   Within each row: [0] = 32-bit integer, single; [1] = 32-bit integer,
   double; [2] = 64-bit integer, single; [3] = 64-bit integer, double.
   The 64-bit entries collapse to NULL on 32-bit hosts via X86_64_ONLY,
   but they are only reachable when s->dflag == 2 (REX.W). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2614
/* CMPPS/CMPPD/CMPSS/CMPSD (0F C2) compare predicates, indexed by the
   imm8 predicate value 0..7 (values >= 8 are rejected in gen_sse).
   Each SSE_FOP(x) row expands to the four prefix variants of the
   operation -- presumably ps/pd/ss/sd in b1 order; SSE_FOP's definition
   is above this chunk, confirm there. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2625
2626static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2627{
2628 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2629 int modrm, mod, rm, reg, reg_addr, offset_addr;
2630 GenOpFunc2 *sse_op2;
2631 GenOpFunc3 *sse_op3;
2632
2633 b &= 0xff;
2634 if (s->prefix & PREFIX_DATA)
2635 b1 = 1;
2636 else if (s->prefix & PREFIX_REPZ)
2637 b1 = 2;
2638 else if (s->prefix & PREFIX_REPNZ)
2639 b1 = 3;
2640 else
2641 b1 = 0;
2642 sse_op2 = sse_op_table1[b][b1];
2643 if (!sse_op2)
2644 goto illegal_op;
2645 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2646 is_xmm = 1;
2647 } else {
2648 if (b1 == 0) {
2649 /* MMX case */
2650 is_xmm = 0;
2651 } else {
2652 is_xmm = 1;
2653 }
2654 }
2655 /* simple MMX/SSE operation */
2656 if (s->flags & HF_TS_MASK) {
2657 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2658 return;
2659 }
2660 if (s->flags & HF_EM_MASK) {
2661 illegal_op:
2662 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2663 return;
2664 }
2665 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2666 goto illegal_op;
2667 if (b == 0x77) {
2668 /* emms */
2669 gen_op_emms();
2670 return;
2671 }
2672 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2673 the static cpu state) */
2674 if (!is_xmm) {
2675 gen_op_enter_mmx();
2676 }
2677
2678 modrm = ldub_code(s->pc++);
2679 reg = ((modrm >> 3) & 7);
2680 if (is_xmm)
2681 reg |= rex_r;
2682 mod = (modrm >> 6) & 3;
2683 if (sse_op2 == SSE_SPECIAL) {
2684 b |= (b1 << 8);
2685 switch(b) {
2686 case 0x0e7: /* movntq */
2687 if (mod == 3)
2688 goto illegal_op;
2689 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2690 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2691 break;
2692 case 0x1e7: /* movntdq */
2693 case 0x02b: /* movntps */
2694 case 0x12b: /* movntps */
2695 case 0x3f0: /* lddqu */
2696 if (mod == 3)
2697 goto illegal_op;
2698 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2699 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2700 break;
2701 case 0x6e: /* movd mm, ea */
2702#ifdef TARGET_X86_64
2703 if (s->dflag == 2) {
2704 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2705 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2706 } else
2707#endif
2708 {
2709 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2710 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2711 }
2712 break;
2713 case 0x16e: /* movd xmm, ea */
2714#ifdef TARGET_X86_64
2715 if (s->dflag == 2) {
2716 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2717 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2718 } else
2719#endif
2720 {
2721 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2722 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2723 }
2724 break;
2725 case 0x6f: /* movq mm, ea */
2726 if (mod != 3) {
2727 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2728 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2729 } else {
2730 rm = (modrm & 7);
2731 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2732 offsetof(CPUX86State,fpregs[rm].mmx));
2733 }
2734 break;
2735 case 0x010: /* movups */
2736 case 0x110: /* movupd */
2737 case 0x028: /* movaps */
2738 case 0x128: /* movapd */
2739 case 0x16f: /* movdqa xmm, ea */
2740 case 0x26f: /* movdqu xmm, ea */
2741 if (mod != 3) {
2742 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2743 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2744 } else {
2745 rm = (modrm & 7) | REX_B(s);
2746 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2747 offsetof(CPUX86State,xmm_regs[rm]));
2748 }
2749 break;
2750 case 0x210: /* movss xmm, ea */
2751 if (mod != 3) {
2752 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2753 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2754 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2755 gen_op_movl_T0_0();
2756 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2757 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2758 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2759 } else {
2760 rm = (modrm & 7) | REX_B(s);
2761 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2762 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2763 }
2764 break;
2765 case 0x310: /* movsd xmm, ea */
2766 if (mod != 3) {
2767 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2768 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2769 gen_op_movl_T0_0();
2770 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2771 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2772 } else {
2773 rm = (modrm & 7) | REX_B(s);
2774 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2775 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2776 }
2777 break;
2778 case 0x012: /* movlps */
2779 case 0x112: /* movlpd */
2780 if (mod != 3) {
2781 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2782 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2783 } else {
2784 /* movhlps */
2785 rm = (modrm & 7) | REX_B(s);
2786 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2787 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2788 }
2789 break;
2790 case 0x212: /* movsldup */
2791 if (mod != 3) {
2792 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2793 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2794 } else {
2795 rm = (modrm & 7) | REX_B(s);
2796 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2797 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2798 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2799 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2800 }
2801 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2802 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2803 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2804 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2805 break;
2806 case 0x312: /* movddup */
2807 if (mod != 3) {
2808 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2809 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2810 } else {
2811 rm = (modrm & 7) | REX_B(s);
2812 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2813 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2814 }
2815 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2816 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2817 break;
2818 case 0x016: /* movhps */
2819 case 0x116: /* movhpd */
2820 if (mod != 3) {
2821 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2822 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2823 } else {
2824 /* movlhps */
2825 rm = (modrm & 7) | REX_B(s);
2826 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2827 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2828 }
2829 break;
2830 case 0x216: /* movshdup */
2831 if (mod != 3) {
2832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2833 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2834 } else {
2835 rm = (modrm & 7) | REX_B(s);
2836 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2837 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2838 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2839 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2840 }
2841 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2842 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2843 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2844 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2845 break;
2846 case 0x7e: /* movd ea, mm */
2847#ifdef TARGET_X86_64
2848 if (s->dflag == 2) {
2849 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2850 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2851 } else
2852#endif
2853 {
2854 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2855 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2856 }
2857 break;
2858 case 0x17e: /* movd ea, xmm */
2859#ifdef TARGET_X86_64
2860 if (s->dflag == 2) {
2861 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2862 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2863 } else
2864#endif
2865 {
2866 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2867 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2868 }
2869 break;
2870 case 0x27e: /* movq xmm, ea */
2871 if (mod != 3) {
2872 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2873 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2874 } else {
2875 rm = (modrm & 7) | REX_B(s);
2876 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2877 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2878 }
2879 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2880 break;
2881 case 0x7f: /* movq ea, mm */
2882 if (mod != 3) {
2883 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2884 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2885 } else {
2886 rm = (modrm & 7);
2887 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2888 offsetof(CPUX86State,fpregs[reg].mmx));
2889 }
2890 break;
2891 case 0x011: /* movups */
2892 case 0x111: /* movupd */
2893 case 0x029: /* movaps */
2894 case 0x129: /* movapd */
2895 case 0x17f: /* movdqa ea, xmm */
2896 case 0x27f: /* movdqu ea, xmm */
2897 if (mod != 3) {
2898 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2899 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2900 } else {
2901 rm = (modrm & 7) | REX_B(s);
2902 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2903 offsetof(CPUX86State,xmm_regs[reg]));
2904 }
2905 break;
2906 case 0x211: /* movss ea, xmm */
2907 if (mod != 3) {
2908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2909 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2910 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2911 } else {
2912 rm = (modrm & 7) | REX_B(s);
2913 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2914 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2915 }
2916 break;
2917 case 0x311: /* movsd ea, xmm */
2918 if (mod != 3) {
2919 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2920 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2921 } else {
2922 rm = (modrm & 7) | REX_B(s);
2923 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2924 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2925 }
2926 break;
2927 case 0x013: /* movlps */
2928 case 0x113: /* movlpd */
2929 if (mod != 3) {
2930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2931 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2932 } else {
2933 goto illegal_op;
2934 }
2935 break;
2936 case 0x017: /* movhps */
2937 case 0x117: /* movhpd */
2938 if (mod != 3) {
2939 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2940 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2941 } else {
2942 goto illegal_op;
2943 }
2944 break;
2945 case 0x71: /* shift mm, im */
2946 case 0x72:
2947 case 0x73:
2948 case 0x171: /* shift xmm, im */
2949 case 0x172:
2950 case 0x173:
2951 val = ldub_code(s->pc++);
2952 if (is_xmm) {
2953 gen_op_movl_T0_im(val);
2954 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2955 gen_op_movl_T0_0();
2956 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2957 op1_offset = offsetof(CPUX86State,xmm_t0);
2958 } else {
2959 gen_op_movl_T0_im(val);
2960 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2961 gen_op_movl_T0_0();
2962 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2963 op1_offset = offsetof(CPUX86State,mmx_t0);
2964 }
2965 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2966 if (!sse_op2)
2967 goto illegal_op;
2968 if (is_xmm) {
2969 rm = (modrm & 7) | REX_B(s);
2970 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2971 } else {
2972 rm = (modrm & 7);
2973 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2974 }
2975 sse_op2(op2_offset, op1_offset);
2976 break;
2977 case 0x050: /* movmskps */
2978 rm = (modrm & 7) | REX_B(s);
2979 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2980 gen_op_mov_reg_T0[OT_LONG][reg]();
2981 break;
2982 case 0x150: /* movmskpd */
2983 rm = (modrm & 7) | REX_B(s);
2984 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2985 gen_op_mov_reg_T0[OT_LONG][reg]();
2986 break;
2987 case 0x02a: /* cvtpi2ps */
2988 case 0x12a: /* cvtpi2pd */
2989 gen_op_enter_mmx();
2990 if (mod != 3) {
2991 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2992 op2_offset = offsetof(CPUX86State,mmx_t0);
2993 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2994 } else {
2995 rm = (modrm & 7);
2996 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2997 }
2998 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2999 switch(b >> 8) {
3000 case 0x0:
3001 gen_op_cvtpi2ps(op1_offset, op2_offset);
3002 break;
3003 default:
3004 case 0x1:
3005 gen_op_cvtpi2pd(op1_offset, op2_offset);
3006 break;
3007 }
3008 break;
3009 case 0x22a: /* cvtsi2ss */
3010 case 0x32a: /* cvtsi2sd */
3011 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3012 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3013 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3014 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3015 break;
3016 case 0x02c: /* cvttps2pi */
3017 case 0x12c: /* cvttpd2pi */
3018 case 0x02d: /* cvtps2pi */
3019 case 0x12d: /* cvtpd2pi */
3020 gen_op_enter_mmx();
3021 if (mod != 3) {
3022 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3023 op2_offset = offsetof(CPUX86State,xmm_t0);
3024 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3025 } else {
3026 rm = (modrm & 7) | REX_B(s);
3027 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3028 }
3029 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3030 switch(b) {
3031 case 0x02c:
3032 gen_op_cvttps2pi(op1_offset, op2_offset);
3033 break;
3034 case 0x12c:
3035 gen_op_cvttpd2pi(op1_offset, op2_offset);
3036 break;
3037 case 0x02d:
3038 gen_op_cvtps2pi(op1_offset, op2_offset);
3039 break;
3040 case 0x12d:
3041 gen_op_cvtpd2pi(op1_offset, op2_offset);
3042 break;
3043 }
3044 break;
3045 case 0x22c: /* cvttss2si */
3046 case 0x32c: /* cvttsd2si */
3047 case 0x22d: /* cvtss2si */
3048 case 0x32d: /* cvtsd2si */
3049 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3050 if (mod != 3) {
3051 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3052 if ((b >> 8) & 1) {
3053 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3054 } else {
3055 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3056 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3057 }
3058 op2_offset = offsetof(CPUX86State,xmm_t0);
3059 } else {
3060 rm = (modrm & 7) | REX_B(s);
3061 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3062 }
3063 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3064 (b & 1) * 4](op2_offset);
3065 gen_op_mov_reg_T0[ot][reg]();
3066 break;
3067 case 0xc4: /* pinsrw */
3068 case 0x1c4:
3069 s->rip_offset = 1;
3070 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3071 val = ldub_code(s->pc++);
3072 if (b1) {
3073 val &= 7;
3074 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3075 } else {
3076 val &= 3;
3077 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3078 }
3079 break;
3080 case 0xc5: /* pextrw */
3081 case 0x1c5:
3082 if (mod != 3)
3083 goto illegal_op;
3084 val = ldub_code(s->pc++);
3085 if (b1) {
3086 val &= 7;
3087 rm = (modrm & 7) | REX_B(s);
3088 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3089 } else {
3090 val &= 3;
3091 rm = (modrm & 7);
3092 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3093 }
3094 reg = ((modrm >> 3) & 7) | rex_r;
3095 gen_op_mov_reg_T0[OT_LONG][reg]();
3096 break;
3097 case 0x1d6: /* movq ea, xmm */
3098 if (mod != 3) {
3099 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3101 } else {
3102 rm = (modrm & 7) | REX_B(s);
3103 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3104 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3105 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3106 }
3107 break;
3108 case 0x2d6: /* movq2dq */
3109 gen_op_enter_mmx();
3110 rm = (modrm & 7);
3111 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3112 offsetof(CPUX86State,fpregs[rm].mmx));
3113 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3114 break;
3115 case 0x3d6: /* movdq2q */
3116 gen_op_enter_mmx();
3117 rm = (modrm & 7) | REX_B(s);
3118 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3119 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3120 break;
3121 case 0xd7: /* pmovmskb */
3122 case 0x1d7:
3123 if (mod != 3)
3124 goto illegal_op;
3125 if (b1) {
3126 rm = (modrm & 7) | REX_B(s);
3127 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3128 } else {
3129 rm = (modrm & 7);
3130 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3131 }
3132 reg = ((modrm >> 3) & 7) | rex_r;
3133 gen_op_mov_reg_T0[OT_LONG][reg]();
3134 break;
3135 default:
3136 goto illegal_op;
3137 }
3138 } else {
3139 /* generic MMX or SSE operation */
3140 switch(b) {
3141 case 0xf7:
3142 /* maskmov : we must prepare A0 */
3143 if (mod != 3)
3144 goto illegal_op;
3145#ifdef TARGET_X86_64
3146 if (s->aflag == 2) {
3147 gen_op_movq_A0_reg[R_EDI]();
3148 } else
3149#endif
3150 {
3151 gen_op_movl_A0_reg[R_EDI]();
3152 if (s->aflag == 0)
3153 gen_op_andl_A0_ffff();
3154 }
3155 gen_add_A0_ds_seg(s);
3156 break;
3157 case 0x70: /* pshufx insn */
3158 case 0xc6: /* pshufx insn */
3159 case 0xc2: /* compare insns */
3160 s->rip_offset = 1;
3161 break;
3162 default:
3163 break;
3164 }
3165 if (is_xmm) {
3166 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3167 if (mod != 3) {
3168 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3169 op2_offset = offsetof(CPUX86State,xmm_t0);
3170 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3171 b == 0xc2)) {
3172 /* specific case for SSE single instructions */
3173 if (b1 == 2) {
3174 /* 32 bit access */
3175 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3176 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3177 } else {
3178 /* 64 bit access */
3179 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3180 }
3181 } else {
3182 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3183 }
3184 } else {
3185 rm = (modrm & 7) | REX_B(s);
3186 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3187 }
3188 } else {
3189 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3190 if (mod != 3) {
3191 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3192 op2_offset = offsetof(CPUX86State,mmx_t0);
3193 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3194 } else {
3195 rm = (modrm & 7);
3196 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3197 }
3198 }
3199 switch(b) {
3200 case 0x70: /* pshufx insn */
3201 case 0xc6: /* pshufx insn */
3202 val = ldub_code(s->pc++);
3203 sse_op3 = (GenOpFunc3 *)sse_op2;
3204 sse_op3(op1_offset, op2_offset, val);
3205 break;
3206 case 0xc2:
3207 /* compare insns */
3208 val = ldub_code(s->pc++);
3209 if (val >= 8)
3210 goto illegal_op;
3211 sse_op2 = sse_op_table4[val][b1];
3212 sse_op2(op1_offset, op2_offset);
3213 break;
3214 default:
3215 sse_op2(op1_offset, op2_offset);
3216 break;
3217 }
3218 if (b == 0x2e || b == 0x2f) {
3219 s->cc_op = CC_OP_EFLAGS;
3220 }
3221 }
3222}
3223
3224#ifdef VBOX
3225/* Checks if it's an invalid lock sequence. Only a few instructions
3226 can be used together with the lock prefix and of those only the
3227 form that write a memory operand. So, this is kind of annoying
3228 work to do...
3229 The AMD manual lists the following instructions.
3230 ADC
3231 ADD
3232 AND
3233 BTC
3234 BTR
3235 BTS
3236 CMPXCHG
3237 CMPXCHG8B
3238 CMPXCHG16B
3239 DEC
3240 INC
3241 NEG
3242 NOT
3243 OR
3244 SBB
3245 SUB
3246 XADD
3247 XCHG
3248 XOR */
3249static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
3250{
3251 target_ulong pc = s->pc;
3252 int modrm, mod, op;
3253
3254 /* X={8,16,32,64} Y={16,32,64} */
3255 switch (b)
3256 {
3257 /* /2: ADC reg/memX, immX */
3258 /* /0: ADD reg/memX, immX */
3259 /* /4: AND reg/memX, immX */
3260 /* /1: OR reg/memX, immX */
3261 /* /3: SBB reg/memX, immX */
3262 /* /5: SUB reg/memX, immX */
3263 /* /6: XOR reg/memX, immX */
3264 case 0x80:
3265 case 0x81:
3266 case 0x83:
3267 modrm = ldub_code(pc++);
3268 op = (modrm >> 3) & 7;
3269 if (op == 7) /* /7: CMP */
3270 break;
3271 mod = (modrm >> 6) & 3;
3272 if (mod == 3) /* register destination */
3273 break;
3274 return false;
3275
3276 case 0x10: /* /r: ADC reg/mem8, reg8 */
3277 case 0x11: /* /r: ADC reg/memX, regY */
3278 case 0x00: /* /r: ADD reg/mem8, reg8 */
3279 case 0x01: /* /r: ADD reg/memX, regY */
3280 case 0x20: /* /r: AND reg/mem8, reg8 */
3281 case 0x21: /* /r: AND reg/memY, regY */
3282 case 0x08: /* /r: OR reg/mem8, reg8 */
3283 case 0x09: /* /r: OR reg/memY, regY */
3284 case 0x18: /* /r: SBB reg/mem8, reg8 */
3285 case 0x19: /* /r: SBB reg/memY, regY */
3286 case 0x28: /* /r: SUB reg/mem8, reg8 */
3287 case 0x29: /* /r: SUB reg/memY, regY */
3288 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
3289 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
3290 case 0x30: /* /r: XOR reg/mem8, reg8 */
3291 case 0x31: /* /r: XOR reg/memY, regY */
3292 modrm = ldub_code(pc++);
3293 mod = (modrm >> 6) & 3;
3294 if (mod == 3) /* register destination */
3295 break;
3296 return false;
3297
3298 /* /1: DEC reg/memX */
3299 /* /0: INC reg/memX */
3300 case 0xfe:
3301 case 0xff:
3302 modrm = ldub_code(pc++);
3303 mod = (modrm >> 6) & 3;
3304 if (mod == 3) /* register destination */
3305 break;
3306 return false;
3307
3308 /* /3: NEG reg/memX */
3309 /* /2: NOT reg/memX */
3310 case 0xf6:
3311 case 0xf7:
3312 modrm = ldub_code(pc++);
3313 mod = (modrm >> 6) & 3;
3314 if (mod == 3) /* register destination */
3315 break;
3316 return false;
3317
3318 case 0x0f:
3319 b = ldub_code(pc++);
3320 switch (b)
3321 {
3322 /* /7: BTC reg/memY, imm8 */
3323 /* /6: BTR reg/memY, imm8 */
3324 /* /5: BTS reg/memY, imm8 */
3325 case 0xba:
3326 modrm = ldub_code(pc++);
3327 op = (modrm >> 3) & 7;
3328 if (op < 5)
3329 break;
3330 mod = (modrm >> 6) & 3;
3331 if (mod == 3) /* register destination */
3332 break;
3333 return false;
3334
3335 case 0xbb: /* /r: BTC reg/memY, regY */
3336 case 0xb3: /* /r: BTR reg/memY, regY */
3337 case 0xab: /* /r: BTS reg/memY, regY */
3338 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
3339 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
3340 case 0xc0: /* /r: XADD reg/mem8, reg8 */
3341 case 0xc1: /* /r: XADD reg/memY, regY */
3342 modrm = ldub_code(pc++);
3343 mod = (modrm >> 6) & 3;
3344 if (mod == 3) /* register destination */
3345 break;
3346 return false;
3347
3348 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
3349 case 0xc7:
3350 modrm = ldub_code(pc++);
3351 op = (modrm >> 3) & 7;
3352 if (op != 1)
3353 break;
3354 return false;
3355 }
3356 break;
3357 }
3358
3359 /* illegal sequence. The s->pc is past the lock prefix and that
3360 is sufficient for the TB, I think. */
3361 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
3362 return true;
3363}
3364#endif /* VBOX */
3365
3366
3367/* convert one instruction. s->is_jmp is set if the translation must
3368 be stopped. Return the next pc value */
3369static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3370{
3371 int b, prefixes, aflag, dflag;
3372 int shift, ot;
3373 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3374 target_ulong next_eip, tval;
3375 int rex_w, rex_r;
3376
3377 s->pc = pc_start;
3378 prefixes = 0;
3379 aflag = s->code32;
3380 dflag = s->code32;
3381 s->override = -1;
3382 rex_w = -1;
3383 rex_r = 0;
3384#ifdef TARGET_X86_64
3385 s->rex_x = 0;
3386 s->rex_b = 0;
3387 x86_64_hregs = 0;
3388#endif
3389 s->rip_offset = 0; /* for relative ip address */
3390
3391#ifdef VBOX
3392 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3393 gen_update_eip(pc_start - s->cs_base);
3394#endif
3395
3396 next_byte:
3397 b = ldub_code(s->pc);
3398 s->pc++;
3399 /* check prefixes */
3400#ifdef TARGET_X86_64
3401 if (CODE64(s)) {
3402 switch (b) {
3403 case 0xf3:
3404 prefixes |= PREFIX_REPZ;
3405 goto next_byte;
3406 case 0xf2:
3407 prefixes |= PREFIX_REPNZ;
3408 goto next_byte;
3409 case 0xf0:
3410 prefixes |= PREFIX_LOCK;
3411 goto next_byte;
3412 case 0x2e:
3413 s->override = R_CS;
3414 goto next_byte;
3415 case 0x36:
3416 s->override = R_SS;
3417 goto next_byte;
3418 case 0x3e:
3419 s->override = R_DS;
3420 goto next_byte;
3421 case 0x26:
3422 s->override = R_ES;
3423 goto next_byte;
3424 case 0x64:
3425 s->override = R_FS;
3426 goto next_byte;
3427 case 0x65:
3428 s->override = R_GS;
3429 goto next_byte;
3430 case 0x66:
3431 prefixes |= PREFIX_DATA;
3432 goto next_byte;
3433 case 0x67:
3434 prefixes |= PREFIX_ADR;
3435 goto next_byte;
3436 case 0x40 ... 0x4f:
3437 /* REX prefix */
3438 rex_w = (b >> 3) & 1;
3439 rex_r = (b & 0x4) << 1;
3440 s->rex_x = (b & 0x2) << 2;
3441 REX_B(s) = (b & 0x1) << 3;
3442 x86_64_hregs = 1; /* select uniform byte register addressing */
3443 goto next_byte;
3444 }
3445 if (rex_w == 1) {
3446 /* 0x66 is ignored if rex.w is set */
3447 dflag = 2;
3448 } else {
3449 if (prefixes & PREFIX_DATA)
3450 dflag ^= 1;
3451 }
3452 if (!(prefixes & PREFIX_ADR))
3453 aflag = 2;
3454 } else
3455#endif
3456 {
3457 switch (b) {
3458 case 0xf3:
3459 prefixes |= PREFIX_REPZ;
3460 goto next_byte;
3461 case 0xf2:
3462 prefixes |= PREFIX_REPNZ;
3463 goto next_byte;
3464 case 0xf0:
3465 prefixes |= PREFIX_LOCK;
3466 goto next_byte;
3467 case 0x2e:
3468 s->override = R_CS;
3469 goto next_byte;
3470 case 0x36:
3471 s->override = R_SS;
3472 goto next_byte;
3473 case 0x3e:
3474 s->override = R_DS;
3475 goto next_byte;
3476 case 0x26:
3477 s->override = R_ES;
3478 goto next_byte;
3479 case 0x64:
3480 s->override = R_FS;
3481 goto next_byte;
3482 case 0x65:
3483 s->override = R_GS;
3484 goto next_byte;
3485 case 0x66:
3486 prefixes |= PREFIX_DATA;
3487 goto next_byte;
3488 case 0x67:
3489 prefixes |= PREFIX_ADR;
3490 goto next_byte;
3491 }
3492 if (prefixes & PREFIX_DATA)
3493 dflag ^= 1;
3494 if (prefixes & PREFIX_ADR)
3495 aflag ^= 1;
3496 }
3497
3498 s->prefix = prefixes;
3499 s->aflag = aflag;
3500 s->dflag = dflag;
3501
3502 /* lock generation */
3503#ifndef VBOX
3504 if (prefixes & PREFIX_LOCK)
3505 gen_op_lock();
3506#else /* VBOX */
3507 if (prefixes & PREFIX_LOCK) {
3508 if (is_invalid_lock_sequence(s, pc_start, b)) {
3509 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3510 return s->pc;
3511 }
3512 gen_op_lock();
3513 }
3514#endif /* VBOX */
3515
3516 /* now check op code */
3517 reswitch:
3518 switch(b) {
3519 case 0x0f:
3520 /**************************/
3521 /* extended op code */
3522 b = ldub_code(s->pc++) | 0x100;
3523 goto reswitch;
3524
3525 /**************************/
3526 /* arith & logic */
3527 case 0x00 ... 0x05:
3528 case 0x08 ... 0x0d:
3529 case 0x10 ... 0x15:
3530 case 0x18 ... 0x1d:
3531 case 0x20 ... 0x25:
3532 case 0x28 ... 0x2d:
3533 case 0x30 ... 0x35:
3534 case 0x38 ... 0x3d:
3535 {
3536 int op, f, val;
3537 op = (b >> 3) & 7;
3538 f = (b >> 1) & 3;
3539
3540 if ((b & 1) == 0)
3541 ot = OT_BYTE;
3542 else
3543 ot = dflag + OT_WORD;
3544
3545 switch(f) {
3546 case 0: /* OP Ev, Gv */
3547 modrm = ldub_code(s->pc++);
3548 reg = ((modrm >> 3) & 7) | rex_r;
3549 mod = (modrm >> 6) & 3;
3550 rm = (modrm & 7) | REX_B(s);
3551 if (mod != 3) {
3552 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3553 opreg = OR_TMP0;
3554 } else if (op == OP_XORL && rm == reg) {
3555 xor_zero:
3556 /* xor reg, reg optimisation */
3557 gen_op_movl_T0_0();
3558 s->cc_op = CC_OP_LOGICB + ot;
3559 gen_op_mov_reg_T0[ot][reg]();
3560 gen_op_update1_cc();
3561 break;
3562 } else {
3563 opreg = rm;
3564 }
3565 gen_op_mov_TN_reg[ot][1][reg]();
3566 gen_op(s, op, ot, opreg);
3567 break;
3568 case 1: /* OP Gv, Ev */
3569 modrm = ldub_code(s->pc++);
3570 mod = (modrm >> 6) & 3;
3571 reg = ((modrm >> 3) & 7) | rex_r;
3572 rm = (modrm & 7) | REX_B(s);
3573 if (mod != 3) {
3574 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3575 gen_op_ld_T1_A0[ot + s->mem_index]();
3576 } else if (op == OP_XORL && rm == reg) {
3577 goto xor_zero;
3578 } else {
3579 gen_op_mov_TN_reg[ot][1][rm]();
3580 }
3581 gen_op(s, op, ot, reg);
3582 break;
3583 case 2: /* OP A, Iv */
3584 val = insn_get(s, ot);
3585 gen_op_movl_T1_im(val);
3586 gen_op(s, op, ot, OR_EAX);
3587 break;
3588 }
3589 }
3590 break;
3591
3592 case 0x80: /* GRP1 */
3593 case 0x81:
3594 case 0x82:
3595 case 0x83:
3596 {
3597 int val;
3598
3599 if ((b & 1) == 0)
3600 ot = OT_BYTE;
3601 else
3602 ot = dflag + OT_WORD;
3603
3604 modrm = ldub_code(s->pc++);
3605 mod = (modrm >> 6) & 3;
3606 rm = (modrm & 7) | REX_B(s);
3607 op = (modrm >> 3) & 7;
3608
3609 if (mod != 3) {
3610 if (b == 0x83)
3611 s->rip_offset = 1;
3612 else
3613 s->rip_offset = insn_const_size(ot);
3614 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3615 opreg = OR_TMP0;
3616 } else {
3617 opreg = rm;
3618 }
3619
3620 switch(b) {
3621 default:
3622 case 0x80:
3623 case 0x81:
3624 case 0x82:
3625 val = insn_get(s, ot);
3626 break;
3627 case 0x83:
3628 val = (int8_t)insn_get(s, OT_BYTE);
3629 break;
3630 }
3631 gen_op_movl_T1_im(val);
3632 gen_op(s, op, ot, opreg);
3633 }
3634 break;
3635
3636 /**************************/
3637 /* inc, dec, and other misc arith */
3638 case 0x40 ... 0x47: /* inc Gv */
3639 ot = dflag ? OT_LONG : OT_WORD;
3640 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3641 break;
3642 case 0x48 ... 0x4f: /* dec Gv */
3643 ot = dflag ? OT_LONG : OT_WORD;
3644 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3645 break;
3646 case 0xf6: /* GRP3 */
3647 case 0xf7:
3648 if ((b & 1) == 0)
3649 ot = OT_BYTE;
3650 else
3651 ot = dflag + OT_WORD;
3652
3653 modrm = ldub_code(s->pc++);
3654 mod = (modrm >> 6) & 3;
3655 rm = (modrm & 7) | REX_B(s);
3656 op = (modrm >> 3) & 7;
3657 if (mod != 3) {
3658 if (op == 0)
3659 s->rip_offset = insn_const_size(ot);
3660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3661 gen_op_ld_T0_A0[ot + s->mem_index]();
3662 } else {
3663 gen_op_mov_TN_reg[ot][0][rm]();
3664 }
3665
3666 switch(op) {
3667 case 0: /* test */
3668 val = insn_get(s, ot);
3669 gen_op_movl_T1_im(val);
3670 gen_op_testl_T0_T1_cc();
3671 s->cc_op = CC_OP_LOGICB + ot;
3672 break;
3673 case 2: /* not */
3674 gen_op_notl_T0();
3675 if (mod != 3) {
3676 gen_op_st_T0_A0[ot + s->mem_index]();
3677 } else {
3678 gen_op_mov_reg_T0[ot][rm]();
3679 }
3680 break;
3681 case 3: /* neg */
3682 gen_op_negl_T0();
3683 if (mod != 3) {
3684 gen_op_st_T0_A0[ot + s->mem_index]();
3685 } else {
3686 gen_op_mov_reg_T0[ot][rm]();
3687 }
3688 gen_op_update_neg_cc();
3689 s->cc_op = CC_OP_SUBB + ot;
3690 break;
3691 case 4: /* mul */
3692 switch(ot) {
3693 case OT_BYTE:
3694 gen_op_mulb_AL_T0();
3695 s->cc_op = CC_OP_MULB;
3696 break;
3697 case OT_WORD:
3698 gen_op_mulw_AX_T0();
3699 s->cc_op = CC_OP_MULW;
3700 break;
3701 default:
3702 case OT_LONG:
3703 gen_op_mull_EAX_T0();
3704 s->cc_op = CC_OP_MULL;
3705 break;
3706#ifdef TARGET_X86_64
3707 case OT_QUAD:
3708 gen_op_mulq_EAX_T0();
3709 s->cc_op = CC_OP_MULQ;
3710 break;
3711#endif
3712 }
3713 break;
3714 case 5: /* imul */
3715 switch(ot) {
3716 case OT_BYTE:
3717 gen_op_imulb_AL_T0();
3718 s->cc_op = CC_OP_MULB;
3719 break;
3720 case OT_WORD:
3721 gen_op_imulw_AX_T0();
3722 s->cc_op = CC_OP_MULW;
3723 break;
3724 default:
3725 case OT_LONG:
3726 gen_op_imull_EAX_T0();
3727 s->cc_op = CC_OP_MULL;
3728 break;
3729#ifdef TARGET_X86_64
3730 case OT_QUAD:
3731 gen_op_imulq_EAX_T0();
3732 s->cc_op = CC_OP_MULQ;
3733 break;
3734#endif
3735 }
3736 break;
3737 case 6: /* div */
3738 switch(ot) {
3739 case OT_BYTE:
3740 gen_jmp_im(pc_start - s->cs_base);
3741 gen_op_divb_AL_T0();
3742 break;
3743 case OT_WORD:
3744 gen_jmp_im(pc_start - s->cs_base);
3745 gen_op_divw_AX_T0();
3746 break;
3747 default:
3748 case OT_LONG:
3749 gen_jmp_im(pc_start - s->cs_base);
3750 gen_op_divl_EAX_T0();
3751 break;
3752#ifdef TARGET_X86_64
3753 case OT_QUAD:
3754 gen_jmp_im(pc_start - s->cs_base);
3755 gen_op_divq_EAX_T0();
3756 break;
3757#endif
3758 }
3759 break;
3760 case 7: /* idiv */
3761 switch(ot) {
3762 case OT_BYTE:
3763 gen_jmp_im(pc_start - s->cs_base);
3764 gen_op_idivb_AL_T0();
3765 break;
3766 case OT_WORD:
3767 gen_jmp_im(pc_start - s->cs_base);
3768 gen_op_idivw_AX_T0();
3769 break;
3770 default:
3771 case OT_LONG:
3772 gen_jmp_im(pc_start - s->cs_base);
3773 gen_op_idivl_EAX_T0();
3774 break;
3775#ifdef TARGET_X86_64
3776 case OT_QUAD:
3777 gen_jmp_im(pc_start - s->cs_base);
3778 gen_op_idivq_EAX_T0();
3779 break;
3780#endif
3781 }
3782 break;
3783 default:
3784 goto illegal_op;
3785 }
3786 break;
3787
3788 case 0xfe: /* GRP4 */
3789 case 0xff: /* GRP5 */
3790 if ((b & 1) == 0)
3791 ot = OT_BYTE;
3792 else
3793 ot = dflag + OT_WORD;
3794
3795 modrm = ldub_code(s->pc++);
3796 mod = (modrm >> 6) & 3;
3797 rm = (modrm & 7) | REX_B(s);
3798 op = (modrm >> 3) & 7;
3799 if (op >= 2 && b == 0xfe) {
3800 goto illegal_op;
3801 }
3802 if (CODE64(s)) {
3803 if (op == 2 || op == 4) {
3804 /* operand size for jumps is 64 bit */
3805 ot = OT_QUAD;
3806 } else if (op == 3 || op == 5) {
3807                /* for far calls/jumps, the operand is 16 or 32 bit, even
3808                   in long mode */
3809 ot = dflag ? OT_LONG : OT_WORD;
3810 } else if (op == 6) {
3811 /* default push size is 64 bit */
3812 ot = dflag ? OT_QUAD : OT_WORD;
3813 }
3814 }
3815 if (mod != 3) {
3816 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3817 if (op >= 2 && op != 3 && op != 5)
3818 gen_op_ld_T0_A0[ot + s->mem_index]();
3819 } else {
3820 gen_op_mov_TN_reg[ot][0][rm]();
3821 }
3822
3823 switch(op) {
3824 case 0: /* inc Ev */
3825 if (mod != 3)
3826 opreg = OR_TMP0;
3827 else
3828 opreg = rm;
3829 gen_inc(s, ot, opreg, 1);
3830 break;
3831 case 1: /* dec Ev */
3832 if (mod != 3)
3833 opreg = OR_TMP0;
3834 else
3835 opreg = rm;
3836 gen_inc(s, ot, opreg, -1);
3837 break;
3838 case 2: /* call Ev */
3839 /* XXX: optimize if memory (no 'and' is necessary) */
3840#ifdef VBOX_WITH_CALL_RECORD
3841 if (s->record_call)
3842 gen_op_record_call();
3843#endif
3844 if (s->dflag == 0)
3845 gen_op_andl_T0_ffff();
3846 next_eip = s->pc - s->cs_base;
3847 gen_movtl_T1_im(next_eip);
3848 gen_push_T1(s);
3849 gen_op_jmp_T0();
3850 gen_eob(s);
3851 break;
3852 case 3: /* lcall Ev */
3853 gen_op_ld_T1_A0[ot + s->mem_index]();
3854 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3855 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3856 do_lcall:
3857 if (s->pe && !s->vm86) {
3858 if (s->cc_op != CC_OP_DYNAMIC)
3859 gen_op_set_cc_op(s->cc_op);
3860 gen_jmp_im(pc_start - s->cs_base);
3861 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3862 } else {
3863 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3864 }
3865 gen_eob(s);
3866 break;
3867 case 4: /* jmp Ev */
3868 if (s->dflag == 0)
3869 gen_op_andl_T0_ffff();
3870 gen_op_jmp_T0();
3871 gen_eob(s);
3872 break;
3873 case 5: /* ljmp Ev */
3874 gen_op_ld_T1_A0[ot + s->mem_index]();
3875 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3876 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3877 do_ljmp:
3878 if (s->pe && !s->vm86) {
3879 if (s->cc_op != CC_OP_DYNAMIC)
3880 gen_op_set_cc_op(s->cc_op);
3881 gen_jmp_im(pc_start - s->cs_base);
3882 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3883 } else {
3884 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3885 gen_op_movl_T0_T1();
3886 gen_op_jmp_T0();
3887 }
3888 gen_eob(s);
3889 break;
3890 case 6: /* push Ev */
3891 gen_push_T0(s);
3892 break;
3893 default:
3894 goto illegal_op;
3895 }
3896 break;
3897
3898 case 0x84: /* test Ev, Gv */
3899 case 0x85:
3900 if ((b & 1) == 0)
3901 ot = OT_BYTE;
3902 else
3903 ot = dflag + OT_WORD;
3904
3905 modrm = ldub_code(s->pc++);
3906 mod = (modrm >> 6) & 3;
3907 rm = (modrm & 7) | REX_B(s);
3908 reg = ((modrm >> 3) & 7) | rex_r;
3909
3910 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3911 gen_op_mov_TN_reg[ot][1][reg]();
3912 gen_op_testl_T0_T1_cc();
3913 s->cc_op = CC_OP_LOGICB + ot;
3914 break;
3915
3916 case 0xa8: /* test eAX, Iv */
3917 case 0xa9:
3918 if ((b & 1) == 0)
3919 ot = OT_BYTE;
3920 else
3921 ot = dflag + OT_WORD;
3922 val = insn_get(s, ot);
3923
3924 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3925 gen_op_movl_T1_im(val);
3926 gen_op_testl_T0_T1_cc();
3927 s->cc_op = CC_OP_LOGICB + ot;
3928 break;
3929
3930 case 0x98: /* CWDE/CBW */
3931#ifdef TARGET_X86_64
3932 if (dflag == 2) {
3933 gen_op_movslq_RAX_EAX();
3934 } else
3935#endif
3936 if (dflag == 1)
3937 gen_op_movswl_EAX_AX();
3938 else
3939 gen_op_movsbw_AX_AL();
3940 break;
3941 case 0x99: /* CDQ/CWD */
3942#ifdef TARGET_X86_64
3943 if (dflag == 2) {
3944 gen_op_movsqo_RDX_RAX();
3945 } else
3946#endif
3947 if (dflag == 1)
3948 gen_op_movslq_EDX_EAX();
3949 else
3950 gen_op_movswl_DX_AX();
3951 break;
3952 case 0x1af: /* imul Gv, Ev */
3953 case 0x69: /* imul Gv, Ev, I */
3954 case 0x6b:
3955 ot = dflag + OT_WORD;
3956 modrm = ldub_code(s->pc++);
3957 reg = ((modrm >> 3) & 7) | rex_r;
3958 if (b == 0x69)
3959 s->rip_offset = insn_const_size(ot);
3960 else if (b == 0x6b)
3961 s->rip_offset = 1;
3962 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3963 if (b == 0x69) {
3964 val = insn_get(s, ot);
3965 gen_op_movl_T1_im(val);
3966 } else if (b == 0x6b) {
3967 val = (int8_t)insn_get(s, OT_BYTE);
3968 gen_op_movl_T1_im(val);
3969 } else {
3970 gen_op_mov_TN_reg[ot][1][reg]();
3971 }
3972
3973#ifdef TARGET_X86_64
3974 if (ot == OT_QUAD) {
3975 gen_op_imulq_T0_T1();
3976 } else
3977#endif
3978 if (ot == OT_LONG) {
3979 gen_op_imull_T0_T1();
3980 } else {
3981 gen_op_imulw_T0_T1();
3982 }
3983 gen_op_mov_reg_T0[ot][reg]();
3984 s->cc_op = CC_OP_MULB + ot;
3985 break;
3986 case 0x1c0:
3987 case 0x1c1: /* xadd Ev, Gv */
3988 if ((b & 1) == 0)
3989 ot = OT_BYTE;
3990 else
3991 ot = dflag + OT_WORD;
3992 modrm = ldub_code(s->pc++);
3993 reg = ((modrm >> 3) & 7) | rex_r;
3994 mod = (modrm >> 6) & 3;
3995 if (mod == 3) {
3996 rm = (modrm & 7) | REX_B(s);
3997 gen_op_mov_TN_reg[ot][0][reg]();
3998 gen_op_mov_TN_reg[ot][1][rm]();
3999 gen_op_addl_T0_T1();
4000 gen_op_mov_reg_T1[ot][reg]();
4001 gen_op_mov_reg_T0[ot][rm]();
4002 } else {
4003 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4004 gen_op_mov_TN_reg[ot][0][reg]();
4005 gen_op_ld_T1_A0[ot + s->mem_index]();
4006 gen_op_addl_T0_T1();
4007 gen_op_st_T0_A0[ot + s->mem_index]();
4008 gen_op_mov_reg_T1[ot][reg]();
4009 }
4010 gen_op_update2_cc();
4011 s->cc_op = CC_OP_ADDB + ot;
4012 break;
4013 case 0x1b0:
4014 case 0x1b1: /* cmpxchg Ev, Gv */
4015 if ((b & 1) == 0)
4016 ot = OT_BYTE;
4017 else
4018 ot = dflag + OT_WORD;
4019 modrm = ldub_code(s->pc++);
4020 reg = ((modrm >> 3) & 7) | rex_r;
4021 mod = (modrm >> 6) & 3;
4022 gen_op_mov_TN_reg[ot][1][reg]();
4023 if (mod == 3) {
4024 rm = (modrm & 7) | REX_B(s);
4025 gen_op_mov_TN_reg[ot][0][rm]();
4026 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
4027 gen_op_mov_reg_T0[ot][rm]();
4028 } else {
4029 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4030 gen_op_ld_T0_A0[ot + s->mem_index]();
4031 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
4032 }
4033 s->cc_op = CC_OP_SUBB + ot;
4034 break;
4035 case 0x1c7: /* cmpxchg8b */
4036 modrm = ldub_code(s->pc++);
4037 mod = (modrm >> 6) & 3;
4038 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4039 goto illegal_op;
4040 if (s->cc_op != CC_OP_DYNAMIC)
4041 gen_op_set_cc_op(s->cc_op);
4042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4043 gen_op_cmpxchg8b();
4044 s->cc_op = CC_OP_EFLAGS;
4045 break;
4046
4047 /**************************/
4048 /* push/pop */
4049 case 0x50 ... 0x57: /* push */
4050 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
4051 gen_push_T0(s);
4052 break;
4053 case 0x58 ... 0x5f: /* pop */
4054 if (CODE64(s)) {
4055 ot = dflag ? OT_QUAD : OT_WORD;
4056 } else {
4057 ot = dflag + OT_WORD;
4058 }
4059 gen_pop_T0(s);
4060 /* NOTE: order is important for pop %sp */
4061 gen_pop_update(s);
4062 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
4063 break;
4064 case 0x60: /* pusha */
4065 if (CODE64(s))
4066 goto illegal_op;
4067 gen_pusha(s);
4068 break;
4069 case 0x61: /* popa */
4070 if (CODE64(s))
4071 goto illegal_op;
4072 gen_popa(s);
4073 break;
4074 case 0x68: /* push Iv */
4075 case 0x6a:
4076 if (CODE64(s)) {
4077 ot = dflag ? OT_QUAD : OT_WORD;
4078 } else {
4079 ot = dflag + OT_WORD;
4080 }
4081 if (b == 0x68)
4082 val = insn_get(s, ot);
4083 else
4084 val = (int8_t)insn_get(s, OT_BYTE);
4085 gen_op_movl_T0_im(val);
4086 gen_push_T0(s);
4087 break;
4088 case 0x8f: /* pop Ev */
4089 if (CODE64(s)) {
4090 ot = dflag ? OT_QUAD : OT_WORD;
4091 } else {
4092 ot = dflag + OT_WORD;
4093 }
4094 modrm = ldub_code(s->pc++);
4095 mod = (modrm >> 6) & 3;
4096 gen_pop_T0(s);
4097 if (mod == 3) {
4098 /* NOTE: order is important for pop %sp */
4099 gen_pop_update(s);
4100 rm = (modrm & 7) | REX_B(s);
4101 gen_op_mov_reg_T0[ot][rm]();
4102 } else {
4103 /* NOTE: order is important too for MMU exceptions */
4104 s->popl_esp_hack = 1 << ot;
4105 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4106 s->popl_esp_hack = 0;
4107 gen_pop_update(s);
4108 }
4109 break;
4110 case 0xc8: /* enter */
4111 {
4112 int level;
4113 val = lduw_code(s->pc);
4114 s->pc += 2;
4115 level = ldub_code(s->pc++);
4116 gen_enter(s, val, level);
4117 }
4118 break;
4119 case 0xc9: /* leave */
4120 /* XXX: exception not precise (ESP is updated before potential exception) */
4121 if (CODE64(s)) {
4122 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
4123 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
4124 } else if (s->ss32) {
4125 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
4126 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
4127 } else {
4128 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
4129 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
4130 }
4131 gen_pop_T0(s);
4132 if (CODE64(s)) {
4133 ot = dflag ? OT_QUAD : OT_WORD;
4134 } else {
4135 ot = dflag + OT_WORD;
4136 }
4137 gen_op_mov_reg_T0[ot][R_EBP]();
4138 gen_pop_update(s);
4139 break;
4140 case 0x06: /* push es */
4141 case 0x0e: /* push cs */
4142 case 0x16: /* push ss */
4143 case 0x1e: /* push ds */
4144 if (CODE64(s))
4145 goto illegal_op;
4146 gen_op_movl_T0_seg(b >> 3);
4147 gen_push_T0(s);
4148 break;
4149 case 0x1a0: /* push fs */
4150 case 0x1a8: /* push gs */
4151 gen_op_movl_T0_seg((b >> 3) & 7);
4152 gen_push_T0(s);
4153 break;
4154 case 0x07: /* pop es */
4155 case 0x17: /* pop ss */
4156 case 0x1f: /* pop ds */
4157 if (CODE64(s))
4158 goto illegal_op;
4159 reg = b >> 3;
4160 gen_pop_T0(s);
4161 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4162 gen_pop_update(s);
4163 if (reg == R_SS) {
4164 /* if reg == SS, inhibit interrupts/trace. */
4165 /* If several instructions disable interrupts, only the
4166 _first_ does it */
4167 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4168 gen_op_set_inhibit_irq();
4169 s->tf = 0;
4170 }
4171 if (s->is_jmp) {
4172 gen_jmp_im(s->pc - s->cs_base);
4173 gen_eob(s);
4174 }
4175 break;
4176 case 0x1a1: /* pop fs */
4177 case 0x1a9: /* pop gs */
4178 gen_pop_T0(s);
4179 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4180 gen_pop_update(s);
4181 if (s->is_jmp) {
4182 gen_jmp_im(s->pc - s->cs_base);
4183 gen_eob(s);
4184 }
4185 break;
4186
4187 /**************************/
4188 /* mov */
4189 case 0x88:
4190 case 0x89: /* mov Gv, Ev */
4191 if ((b & 1) == 0)
4192 ot = OT_BYTE;
4193 else
4194 ot = dflag + OT_WORD;
4195 modrm = ldub_code(s->pc++);
4196 reg = ((modrm >> 3) & 7) | rex_r;
4197
4198 /* generate a generic store */
4199 gen_ldst_modrm(s, modrm, ot, reg, 1);
4200 break;
4201 case 0xc6:
4202 case 0xc7: /* mov Ev, Iv */
4203 if ((b & 1) == 0)
4204 ot = OT_BYTE;
4205 else
4206 ot = dflag + OT_WORD;
4207 modrm = ldub_code(s->pc++);
4208 mod = (modrm >> 6) & 3;
4209 if (mod != 3) {
4210 s->rip_offset = insn_const_size(ot);
4211 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4212 }
4213 val = insn_get(s, ot);
4214 gen_op_movl_T0_im(val);
4215 if (mod != 3)
4216 gen_op_st_T0_A0[ot + s->mem_index]();
4217 else
4218 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4219 break;
4220 case 0x8a:
4221 case 0x8b: /* mov Ev, Gv */
4222#ifdef VBOX /* dtrace hot fix */
4223 if (prefixes & PREFIX_LOCK)
4224 goto illegal_op;
4225#endif
4226 if ((b & 1) == 0)
4227 ot = OT_BYTE;
4228 else
4229 ot = OT_WORD + dflag;
4230 modrm = ldub_code(s->pc++);
4231 reg = ((modrm >> 3) & 7) | rex_r;
4232
4233 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4234 gen_op_mov_reg_T0[ot][reg]();
4235 break;
4236 case 0x8e: /* mov seg, Gv */
4237 modrm = ldub_code(s->pc++);
4238 reg = (modrm >> 3) & 7;
4239 if (reg >= 6 || reg == R_CS)
4240 goto illegal_op;
4241 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4242 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4243 if (reg == R_SS) {
4244 /* if reg == SS, inhibit interrupts/trace */
4245 /* If several instructions disable interrupts, only the
4246 _first_ does it */
4247 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4248 gen_op_set_inhibit_irq();
4249 s->tf = 0;
4250 }
4251 if (s->is_jmp) {
4252 gen_jmp_im(s->pc - s->cs_base);
4253 gen_eob(s);
4254 }
4255 break;
4256 case 0x8c: /* mov Gv, seg */
4257 modrm = ldub_code(s->pc++);
4258 reg = (modrm >> 3) & 7;
4259 mod = (modrm >> 6) & 3;
4260 if (reg >= 6)
4261 goto illegal_op;
4262 gen_op_movl_T0_seg(reg);
4263 if (mod == 3)
4264 ot = OT_WORD + dflag;
4265 else
4266 ot = OT_WORD;
4267 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4268 break;
4269
4270 case 0x1b6: /* movzbS Gv, Eb */
4271 case 0x1b7: /* movzwS Gv, Eb */
4272 case 0x1be: /* movsbS Gv, Eb */
4273 case 0x1bf: /* movswS Gv, Eb */
4274 {
4275 int d_ot;
4276 /* d_ot is the size of destination */
4277 d_ot = dflag + OT_WORD;
4278 /* ot is the size of source */
4279 ot = (b & 1) + OT_BYTE;
4280 modrm = ldub_code(s->pc++);
4281 reg = ((modrm >> 3) & 7) | rex_r;
4282 mod = (modrm >> 6) & 3;
4283 rm = (modrm & 7) | REX_B(s);
4284
4285 if (mod == 3) {
4286 gen_op_mov_TN_reg[ot][0][rm]();
4287 switch(ot | (b & 8)) {
4288 case OT_BYTE:
4289 gen_op_movzbl_T0_T0();
4290 break;
4291 case OT_BYTE | 8:
4292 gen_op_movsbl_T0_T0();
4293 break;
4294 case OT_WORD:
4295 gen_op_movzwl_T0_T0();
4296 break;
4297 default:
4298 case OT_WORD | 8:
4299 gen_op_movswl_T0_T0();
4300 break;
4301 }
4302 gen_op_mov_reg_T0[d_ot][reg]();
4303 } else {
4304 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4305 if (b & 8) {
4306 gen_op_lds_T0_A0[ot + s->mem_index]();
4307 } else {
4308 gen_op_ldu_T0_A0[ot + s->mem_index]();
4309 }
4310 gen_op_mov_reg_T0[d_ot][reg]();
4311 }
4312 }
4313 break;
4314
4315 case 0x8d: /* lea */
4316 ot = dflag + OT_WORD;
4317 modrm = ldub_code(s->pc++);
4318 mod = (modrm >> 6) & 3;
4319 if (mod == 3)
4320 goto illegal_op;
4321 reg = ((modrm >> 3) & 7) | rex_r;
4322 /* we must ensure that no segment is added */
4323 s->override = -1;
4324 val = s->addseg;
4325 s->addseg = 0;
4326 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4327 s->addseg = val;
4328 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4329 break;
4330
4331 case 0xa0: /* mov EAX, Ov */
4332 case 0xa1:
4333 case 0xa2: /* mov Ov, EAX */
4334 case 0xa3:
4335 {
4336 target_ulong offset_addr;
4337
4338 if ((b & 1) == 0)
4339 ot = OT_BYTE;
4340 else
4341 ot = dflag + OT_WORD;
4342#ifdef TARGET_X86_64
4343 if (s->aflag == 2) {
4344 offset_addr = ldq_code(s->pc);
4345 s->pc += 8;
4346 if (offset_addr == (int32_t)offset_addr)
4347 gen_op_movq_A0_im(offset_addr);
4348 else
4349 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4350 } else
4351#endif
4352 {
4353 if (s->aflag) {
4354 offset_addr = insn_get(s, OT_LONG);
4355 } else {
4356 offset_addr = insn_get(s, OT_WORD);
4357 }
4358 gen_op_movl_A0_im(offset_addr);
4359 }
4360 gen_add_A0_ds_seg(s);
4361 if ((b & 2) == 0) {
4362 gen_op_ld_T0_A0[ot + s->mem_index]();
4363 gen_op_mov_reg_T0[ot][R_EAX]();
4364 } else {
4365 gen_op_mov_TN_reg[ot][0][R_EAX]();
4366 gen_op_st_T0_A0[ot + s->mem_index]();
4367 }
4368 }
4369 break;
4370 case 0xd7: /* xlat */
4371#ifdef TARGET_X86_64
4372 if (s->aflag == 2) {
4373 gen_op_movq_A0_reg[R_EBX]();
4374 gen_op_addq_A0_AL();
4375 } else
4376#endif
4377 {
4378 gen_op_movl_A0_reg[R_EBX]();
4379 gen_op_addl_A0_AL();
4380 if (s->aflag == 0)
4381 gen_op_andl_A0_ffff();
4382 }
4383 gen_add_A0_ds_seg(s);
4384 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4385 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4386 break;
4387 case 0xb0 ... 0xb7: /* mov R, Ib */
4388 val = insn_get(s, OT_BYTE);
4389 gen_op_movl_T0_im(val);
4390 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4391 break;
4392 case 0xb8 ... 0xbf: /* mov R, Iv */
4393#ifdef TARGET_X86_64
4394 if (dflag == 2) {
4395 uint64_t tmp;
4396 /* 64 bit case */
4397 tmp = ldq_code(s->pc);
4398 s->pc += 8;
4399 reg = (b & 7) | REX_B(s);
4400 gen_movtl_T0_im(tmp);
4401 gen_op_mov_reg_T0[OT_QUAD][reg]();
4402 } else
4403#endif
4404 {
4405 ot = dflag ? OT_LONG : OT_WORD;
4406 val = insn_get(s, ot);
4407 reg = (b & 7) | REX_B(s);
4408 gen_op_movl_T0_im(val);
4409 gen_op_mov_reg_T0[ot][reg]();
4410 }
4411 break;
4412
4413 case 0x91 ... 0x97: /* xchg R, EAX */
4414 ot = dflag + OT_WORD;
4415 reg = (b & 7) | REX_B(s);
4416 rm = R_EAX;
4417 goto do_xchg_reg;
4418 case 0x86:
4419 case 0x87: /* xchg Ev, Gv */
4420 if ((b & 1) == 0)
4421 ot = OT_BYTE;
4422 else
4423 ot = dflag + OT_WORD;
4424 modrm = ldub_code(s->pc++);
4425 reg = ((modrm >> 3) & 7) | rex_r;
4426 mod = (modrm >> 6) & 3;
4427 if (mod == 3) {
4428 rm = (modrm & 7) | REX_B(s);
4429 do_xchg_reg:
4430 gen_op_mov_TN_reg[ot][0][reg]();
4431 gen_op_mov_TN_reg[ot][1][rm]();
4432 gen_op_mov_reg_T0[ot][rm]();
4433 gen_op_mov_reg_T1[ot][reg]();
4434 } else {
4435 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4436 gen_op_mov_TN_reg[ot][0][reg]();
4437 /* for xchg, lock is implicit */
4438 if (!(prefixes & PREFIX_LOCK))
4439 gen_op_lock();
4440 gen_op_ld_T1_A0[ot + s->mem_index]();
4441 gen_op_st_T0_A0[ot + s->mem_index]();
4442 if (!(prefixes & PREFIX_LOCK))
4443 gen_op_unlock();
4444 gen_op_mov_reg_T1[ot][reg]();
4445 }
4446 break;
4447 case 0xc4: /* les Gv */
4448 if (CODE64(s))
4449 goto illegal_op;
4450 op = R_ES;
4451 goto do_lxx;
4452 case 0xc5: /* lds Gv */
4453 if (CODE64(s))
4454 goto illegal_op;
4455 op = R_DS;
4456 goto do_lxx;
4457 case 0x1b2: /* lss Gv */
4458 op = R_SS;
4459 goto do_lxx;
4460 case 0x1b4: /* lfs Gv */
4461 op = R_FS;
4462 goto do_lxx;
4463 case 0x1b5: /* lgs Gv */
4464 op = R_GS;
4465 do_lxx:
4466 ot = dflag ? OT_LONG : OT_WORD;
4467 modrm = ldub_code(s->pc++);
4468 reg = ((modrm >> 3) & 7) | rex_r;
4469 mod = (modrm >> 6) & 3;
4470 if (mod == 3)
4471 goto illegal_op;
4472 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4473 gen_op_ld_T1_A0[ot + s->mem_index]();
4474 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4475 /* load the segment first to handle exceptions properly */
4476 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4477 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4478 /* then put the data */
4479 gen_op_mov_reg_T1[ot][reg]();
4480 if (s->is_jmp) {
4481 gen_jmp_im(s->pc - s->cs_base);
4482 gen_eob(s);
4483 }
4484 break;
4485
4486 /************************/
4487 /* shifts */
4488 case 0xc0:
4489 case 0xc1:
4490 /* shift Ev,Ib */
4491 shift = 2;
4492 grp2:
4493 {
4494 if ((b & 1) == 0)
4495 ot = OT_BYTE;
4496 else
4497 ot = dflag + OT_WORD;
4498
4499 modrm = ldub_code(s->pc++);
4500 mod = (modrm >> 6) & 3;
4501 op = (modrm >> 3) & 7;
4502
4503 if (mod != 3) {
4504 if (shift == 2) {
4505 s->rip_offset = 1;
4506 }
4507 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4508 opreg = OR_TMP0;
4509 } else {
4510 opreg = (modrm & 7) | REX_B(s);
4511 }
4512
4513 /* simpler op */
4514 if (shift == 0) {
4515 gen_shift(s, op, ot, opreg, OR_ECX);
4516 } else {
4517 if (shift == 2) {
4518 shift = ldub_code(s->pc++);
4519 }
4520 gen_shifti(s, op, ot, opreg, shift);
4521 }
4522 }
4523 break;
4524 case 0xd0:
4525 case 0xd1:
4526 /* shift Ev,1 */
4527 shift = 1;
4528 goto grp2;
4529 case 0xd2:
4530 case 0xd3:
4531 /* shift Ev,cl */
4532 shift = 0;
4533 goto grp2;
4534
4535 case 0x1a4: /* shld imm */
4536 op = 0;
4537 shift = 1;
4538 goto do_shiftd;
4539 case 0x1a5: /* shld cl */
4540 op = 0;
4541 shift = 0;
4542 goto do_shiftd;
4543 case 0x1ac: /* shrd imm */
4544 op = 1;
4545 shift = 1;
4546 goto do_shiftd;
4547 case 0x1ad: /* shrd cl */
4548 op = 1;
4549 shift = 0;
4550 do_shiftd:
4551 ot = dflag + OT_WORD;
4552 modrm = ldub_code(s->pc++);
4553 mod = (modrm >> 6) & 3;
4554 rm = (modrm & 7) | REX_B(s);
4555 reg = ((modrm >> 3) & 7) | rex_r;
4556
4557 if (mod != 3) {
4558 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4559 gen_op_ld_T0_A0[ot + s->mem_index]();
4560 } else {
4561 gen_op_mov_TN_reg[ot][0][rm]();
4562 }
4563 gen_op_mov_TN_reg[ot][1][reg]();
4564
4565 if (shift) {
4566 val = ldub_code(s->pc++);
4567 if (ot == OT_QUAD)
4568 val &= 0x3f;
4569 else
4570 val &= 0x1f;
4571 if (val) {
4572 if (mod == 3)
4573 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4574 else
4575 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4576 if (op == 0 && ot != OT_WORD)
4577 s->cc_op = CC_OP_SHLB + ot;
4578 else
4579 s->cc_op = CC_OP_SARB + ot;
4580 }
4581 } else {
4582 if (s->cc_op != CC_OP_DYNAMIC)
4583 gen_op_set_cc_op(s->cc_op);
4584 if (mod == 3)
4585 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4586 else
4587 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4588 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4589 }
4590 if (mod == 3) {
4591 gen_op_mov_reg_T0[ot][rm]();
4592 }
4593 break;
4594
4595 /************************/
4596 /* floats */
4597 case 0xd8 ... 0xdf:
4598 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4599 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4600 /* XXX: what to do if illegal op ? */
4601 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4602 break;
4603 }
4604 modrm = ldub_code(s->pc++);
4605 mod = (modrm >> 6) & 3;
4606 rm = modrm & 7;
4607 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4608 if (mod != 3) {
4609 /* memory op */
4610 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4611 switch(op) {
4612 case 0x00 ... 0x07: /* fxxxs */
4613 case 0x10 ... 0x17: /* fixxxl */
4614 case 0x20 ... 0x27: /* fxxxl */
4615 case 0x30 ... 0x37: /* fixxx */
4616 {
4617 int op1;
4618 op1 = op & 7;
4619
4620 switch(op >> 4) {
4621 case 0:
4622 gen_op_flds_FT0_A0();
4623 break;
4624 case 1:
4625 gen_op_fildl_FT0_A0();
4626 break;
4627 case 2:
4628 gen_op_fldl_FT0_A0();
4629 break;
4630 case 3:
4631 default:
4632 gen_op_fild_FT0_A0();
4633 break;
4634 }
4635
4636 gen_op_fp_arith_ST0_FT0[op1]();
4637 if (op1 == 3) {
4638 /* fcomp needs pop */
4639 gen_op_fpop();
4640 }
4641 }
4642 break;
4643 case 0x08: /* flds */
4644 case 0x0a: /* fsts */
4645 case 0x0b: /* fstps */
4646 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4647 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4648 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4649 switch(op & 7) {
4650 case 0:
4651 switch(op >> 4) {
4652 case 0:
4653 gen_op_flds_ST0_A0();
4654 break;
4655 case 1:
4656 gen_op_fildl_ST0_A0();
4657 break;
4658 case 2:
4659 gen_op_fldl_ST0_A0();
4660 break;
4661 case 3:
4662 default:
4663 gen_op_fild_ST0_A0();
4664 break;
4665 }
4666 break;
4667 case 1:
4668 switch(op >> 4) {
4669 case 1:
4670 gen_op_fisttl_ST0_A0();
4671 break;
4672 case 2:
4673 gen_op_fisttll_ST0_A0();
4674 break;
4675 case 3:
4676 default:
4677 gen_op_fistt_ST0_A0();
4678 }
4679 gen_op_fpop();
4680 break;
4681 default:
4682 switch(op >> 4) {
4683 case 0:
4684 gen_op_fsts_ST0_A0();
4685 break;
4686 case 1:
4687 gen_op_fistl_ST0_A0();
4688 break;
4689 case 2:
4690 gen_op_fstl_ST0_A0();
4691 break;
4692 case 3:
4693 default:
4694 gen_op_fist_ST0_A0();
4695 break;
4696 }
4697 if ((op & 7) == 3)
4698 gen_op_fpop();
4699 break;
4700 }
4701 break;
4702 case 0x0c: /* fldenv mem */
4703 gen_op_fldenv_A0(s->dflag);
4704 break;
4705 case 0x0d: /* fldcw mem */
4706 gen_op_fldcw_A0();
4707 break;
4708 case 0x0e: /* fnstenv mem */
4709 gen_op_fnstenv_A0(s->dflag);
4710 break;
4711 case 0x0f: /* fnstcw mem */
4712 gen_op_fnstcw_A0();
4713 break;
4714 case 0x1d: /* fldt mem */
4715 gen_op_fldt_ST0_A0();
4716 break;
4717 case 0x1f: /* fstpt mem */
4718 gen_op_fstt_ST0_A0();
4719 gen_op_fpop();
4720 break;
4721 case 0x2c: /* frstor mem */
4722 gen_op_frstor_A0(s->dflag);
4723 break;
4724 case 0x2e: /* fnsave mem */
4725 gen_op_fnsave_A0(s->dflag);
4726 break;
4727 case 0x2f: /* fnstsw mem */
4728 gen_op_fnstsw_A0();
4729 break;
4730 case 0x3c: /* fbld */
4731 gen_op_fbld_ST0_A0();
4732 break;
4733 case 0x3e: /* fbstp */
4734 gen_op_fbst_ST0_A0();
4735 gen_op_fpop();
4736 break;
4737 case 0x3d: /* fildll */
4738 gen_op_fildll_ST0_A0();
4739 break;
4740 case 0x3f: /* fistpll */
4741 gen_op_fistll_ST0_A0();
4742 gen_op_fpop();
4743 break;
4744 default:
4745 goto illegal_op;
4746 }
4747 } else {
4748 /* register float ops */
4749 opreg = rm;
4750
4751 switch(op) {
4752 case 0x08: /* fld sti */
4753 gen_op_fpush();
4754 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4755 break;
4756 case 0x09: /* fxchg sti */
4757 case 0x29: /* fxchg4 sti, undocumented op */
4758 case 0x39: /* fxchg7 sti, undocumented op */
4759 gen_op_fxchg_ST0_STN(opreg);
4760 break;
4761 case 0x0a: /* grp d9/2 */
4762 switch(rm) {
4763 case 0: /* fnop */
4764 /* check exceptions (FreeBSD FPU probe) */
4765 if (s->cc_op != CC_OP_DYNAMIC)
4766 gen_op_set_cc_op(s->cc_op);
4767 gen_jmp_im(pc_start - s->cs_base);
4768 gen_op_fwait();
4769 break;
4770 default:
4771 goto illegal_op;
4772 }
4773 break;
4774 case 0x0c: /* grp d9/4 */
4775 switch(rm) {
4776 case 0: /* fchs */
4777 gen_op_fchs_ST0();
4778 break;
4779 case 1: /* fabs */
4780 gen_op_fabs_ST0();
4781 break;
4782 case 4: /* ftst */
4783 gen_op_fldz_FT0();
4784 gen_op_fcom_ST0_FT0();
4785 break;
4786 case 5: /* fxam */
4787 gen_op_fxam_ST0();
4788 break;
4789 default:
4790 goto illegal_op;
4791 }
4792 break;
4793 case 0x0d: /* grp d9/5 */
4794 {
4795 switch(rm) {
4796 case 0:
4797 gen_op_fpush();
4798 gen_op_fld1_ST0();
4799 break;
4800 case 1:
4801 gen_op_fpush();
4802 gen_op_fldl2t_ST0();
4803 break;
4804 case 2:
4805 gen_op_fpush();
4806 gen_op_fldl2e_ST0();
4807 break;
4808 case 3:
4809 gen_op_fpush();
4810 gen_op_fldpi_ST0();
4811 break;
4812 case 4:
4813 gen_op_fpush();
4814 gen_op_fldlg2_ST0();
4815 break;
4816 case 5:
4817 gen_op_fpush();
4818 gen_op_fldln2_ST0();
4819 break;
4820 case 6:
4821 gen_op_fpush();
4822 gen_op_fldz_ST0();
4823 break;
4824 default:
4825 goto illegal_op;
4826 }
4827 }
4828 break;
4829 case 0x0e: /* grp d9/6 */
4830 switch(rm) {
4831 case 0: /* f2xm1 */
4832 gen_op_f2xm1();
4833 break;
4834 case 1: /* fyl2x */
4835 gen_op_fyl2x();
4836 break;
4837 case 2: /* fptan */
4838 gen_op_fptan();
4839 break;
4840 case 3: /* fpatan */
4841 gen_op_fpatan();
4842 break;
4843 case 4: /* fxtract */
4844 gen_op_fxtract();
4845 break;
4846 case 5: /* fprem1 */
4847 gen_op_fprem1();
4848 break;
4849 case 6: /* fdecstp */
4850 gen_op_fdecstp();
4851 break;
4852 default:
4853 case 7: /* fincstp */
4854 gen_op_fincstp();
4855 break;
4856 }
4857 break;
4858 case 0x0f: /* grp d9/7 */
4859 switch(rm) {
4860 case 0: /* fprem */
4861 gen_op_fprem();
4862 break;
4863 case 1: /* fyl2xp1 */
4864 gen_op_fyl2xp1();
4865 break;
4866 case 2: /* fsqrt */
4867 gen_op_fsqrt();
4868 break;
4869 case 3: /* fsincos */
4870 gen_op_fsincos();
4871 break;
4872 case 5: /* fscale */
4873 gen_op_fscale();
4874 break;
4875 case 4: /* frndint */
4876 gen_op_frndint();
4877 break;
4878 case 6: /* fsin */
4879 gen_op_fsin();
4880 break;
4881 default:
4882 case 7: /* fcos */
4883 gen_op_fcos();
4884 break;
4885 }
4886 break;
4887 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4888 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4889 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4890 {
4891 int op1;
4892
4893 op1 = op & 7;
4894 if (op >= 0x20) {
4895 gen_op_fp_arith_STN_ST0[op1](opreg);
4896 if (op >= 0x30)
4897 gen_op_fpop();
4898 } else {
4899 gen_op_fmov_FT0_STN(opreg);
4900 gen_op_fp_arith_ST0_FT0[op1]();
4901 }
4902 }
4903 break;
4904 case 0x02: /* fcom */
4905 case 0x22: /* fcom2, undocumented op */
4906 gen_op_fmov_FT0_STN(opreg);
4907 gen_op_fcom_ST0_FT0();
4908 break;
4909 case 0x03: /* fcomp */
4910 case 0x23: /* fcomp3, undocumented op */
4911 case 0x32: /* fcomp5, undocumented op */
4912 gen_op_fmov_FT0_STN(opreg);
4913 gen_op_fcom_ST0_FT0();
4914 gen_op_fpop();
4915 break;
4916 case 0x15: /* da/5 */
4917 switch(rm) {
4918 case 1: /* fucompp */
4919 gen_op_fmov_FT0_STN(1);
4920 gen_op_fucom_ST0_FT0();
4921 gen_op_fpop();
4922 gen_op_fpop();
4923 break;
4924 default:
4925 goto illegal_op;
4926 }
4927 break;
4928 case 0x1c:
4929 switch(rm) {
4930 case 0: /* feni (287 only, just do nop here) */
4931 break;
4932 case 1: /* fdisi (287 only, just do nop here) */
4933 break;
4934 case 2: /* fclex */
4935 gen_op_fclex();
4936 break;
4937 case 3: /* fninit */
4938 gen_op_fninit();
4939 break;
4940 case 4: /* fsetpm (287 only, just do nop here) */
4941 break;
4942 default:
4943 goto illegal_op;
4944 }
4945 break;
4946 case 0x1d: /* fucomi */
4947 if (s->cc_op != CC_OP_DYNAMIC)
4948 gen_op_set_cc_op(s->cc_op);
4949 gen_op_fmov_FT0_STN(opreg);
4950 gen_op_fucomi_ST0_FT0();
4951 s->cc_op = CC_OP_EFLAGS;
4952 break;
4953 case 0x1e: /* fcomi */
4954 if (s->cc_op != CC_OP_DYNAMIC)
4955 gen_op_set_cc_op(s->cc_op);
4956 gen_op_fmov_FT0_STN(opreg);
4957 gen_op_fcomi_ST0_FT0();
4958 s->cc_op = CC_OP_EFLAGS;
4959 break;
4960 case 0x28: /* ffree sti */
4961 gen_op_ffree_STN(opreg);
4962 break;
4963 case 0x2a: /* fst sti */
4964 gen_op_fmov_STN_ST0(opreg);
4965 break;
4966 case 0x2b: /* fstp sti */
4967 case 0x0b: /* fstp1 sti, undocumented op */
4968 case 0x3a: /* fstp8 sti, undocumented op */
4969 case 0x3b: /* fstp9 sti, undocumented op */
4970 gen_op_fmov_STN_ST0(opreg);
4971 gen_op_fpop();
4972 break;
4973 case 0x2c: /* fucom st(i) */
4974 gen_op_fmov_FT0_STN(opreg);
4975 gen_op_fucom_ST0_FT0();
4976 break;
4977 case 0x2d: /* fucomp st(i) */
4978 gen_op_fmov_FT0_STN(opreg);
4979 gen_op_fucom_ST0_FT0();
4980 gen_op_fpop();
4981 break;
4982 case 0x33: /* de/3 */
4983 switch(rm) {
4984 case 1: /* fcompp */
4985 gen_op_fmov_FT0_STN(1);
4986 gen_op_fcom_ST0_FT0();
4987 gen_op_fpop();
4988 gen_op_fpop();
4989 break;
4990 default:
4991 goto illegal_op;
4992 }
4993 break;
4994 case 0x38: /* ffreep sti, undocumented op */
4995 gen_op_ffree_STN(opreg);
4996 gen_op_fpop();
4997 break;
4998 case 0x3c: /* df/4 */
4999 switch(rm) {
5000 case 0:
5001 gen_op_fnstsw_EAX();
5002 break;
5003 default:
5004 goto illegal_op;
5005 }
5006 break;
5007 case 0x3d: /* fucomip */
5008 if (s->cc_op != CC_OP_DYNAMIC)
5009 gen_op_set_cc_op(s->cc_op);
5010 gen_op_fmov_FT0_STN(opreg);
5011 gen_op_fucomi_ST0_FT0();
5012 gen_op_fpop();
5013 s->cc_op = CC_OP_EFLAGS;
5014 break;
5015 case 0x3e: /* fcomip */
5016 if (s->cc_op != CC_OP_DYNAMIC)
5017 gen_op_set_cc_op(s->cc_op);
5018 gen_op_fmov_FT0_STN(opreg);
5019 gen_op_fcomi_ST0_FT0();
5020 gen_op_fpop();
5021 s->cc_op = CC_OP_EFLAGS;
5022 break;
5023 case 0x10 ... 0x13: /* fcmovxx */
5024 case 0x18 ... 0x1b:
5025 {
5026 int op1;
5027 const static uint8_t fcmov_cc[8] = {
5028 (JCC_B << 1),
5029 (JCC_Z << 1),
5030 (JCC_BE << 1),
5031 (JCC_P << 1),
5032 };
5033 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5034 gen_setcc(s, op1);
5035 gen_op_fcmov_ST0_STN_T0(opreg);
5036 }
5037 break;
5038 default:
5039 goto illegal_op;
5040 }
5041 }
5042#ifdef USE_CODE_COPY
5043 s->tb->cflags |= CF_TB_FP_USED;
5044#endif
5045 break;
5046 /************************/
5047 /* string ops */
5048
    /* String operations (movs/stos/lods/scas/cmps/ins/outs).
       Bit 0 of the opcode selects byte vs. full operand size; with a
       REP/REPNZ prefix the gen_repz_* helper is used instead, taking the
       instruction's start and end EIP (cs-relative) so the generated
       loop can restart the insn or fall through past it. */
    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD; /* word/long (quad when dflag == 2) */

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        /* scas compares, so REPZ and REPNZ are distinct (ZF termination
           test); the last argument selects the REPNZ variant. */
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            /* flags are those of a subtract of operand size ot */
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD; /* I/O has no 64-bit width */
        /* emit the I/O permission check (can raise an exception at the
           insn start address) */
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
5145
5146 /************************/
5147 /* port I/O */
    case 0xe4: /* in AL/AX/EAX, imm8 */
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++); /* 8-bit immediate port number */
        gen_op_movl_T0_im(val);   /* port number in T0 */
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_in[ot]();          /* result in T1 */
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xe6: /* out imm8, AL/AX/EAX */
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* Drop writes to the classic 0x80 POST/delay port entirely;
           note the I/O permission check above has already been emitted. */
        if (val == 0x80)
            break;
#endif /* VBOX */
        gen_op_mov_TN_reg[ot][1][R_EAX](); /* value to write in T1 */
        gen_op_out[ot]();
        break;
    case 0xec: /* in AL/AX/EAX, DX */
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff(); /* port numbers are 16 bits */
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_in[ot]();
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xee: /* out DX, AL/AX/EAX */
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
5200
5201 /************************/
5202 /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc); /* NOTE(review): sign-extends the imm16;
                                   hardware treats the RET immediate as
                                   unsigned -- only differs for values
                                   >= 0x8000, verify intent */
        s->pc += 2;
        gen_pop_T0(s); /* return address -> T0 */
        if (CODE64(s) && s->dflag)
            s->dflag = 2; /* near ret defaults to 64-bit operand in long mode */
        gen_stack_update(s, val + (2 << s->dflag)); /* pop addr + release imm bytes */
        if (s->dflag == 0)
            gen_op_andl_T0_ffff(); /* 16-bit IP */
        gen_op_jmp_T0();
        gen_eob(s); /* indirect jump: end the translation block */
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret: /* shared with 0xcb (lret, val == 0) */
        if (s->pe && !s->vm86) {
            /* protected mode: selector/privilege checks are done in the
               helper, which needs up-to-date cc state and EIP */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
        } else {
            /* real or vm86 mode: pop EIP and CS inline */
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (!s->pe) {
            /* real mode */
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
#ifdef VBOX
            /* VBox: with CR4.VME a 16-bit iret is also permitted when
               IOPL != 3 -- presumably the helper emulates the VIF
               semantics; verify against gen_op_iret_real */
            if (s->iopl != 3 && (!s->vme || s->dflag)) {
#else
            if (s->iopl != 3) {
#endif
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_op_iret_real(s->dflag);
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            /* protected mode iret: done entirely in the helper */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            /* relative displacement, sign-extended to target width */
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base; /* return address (cs-relative) */
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff; /* 16-bit IP wrap */
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s); /* push return address */
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op; /* far call with immediate is invalid in 64-bit mode */
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            /* selector in T0, offset in T1 for the shared lcall path */
            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE); /* short form: 8-bit displacement */
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc: /* shared tail: tval holds the displacement */
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        /* condition code is taken from the low opcode bits in b */
        gen_jcc(s, b, tval, next_eip);
        break;
5354
5355 case 0x190 ... 0x19f: /* setcc Gv */
5356 modrm = ldub_code(s->pc++);
5357 gen_setcc(s, b);
5358 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5359 break;
5360 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5361 ot = dflag + OT_WORD;
5362 modrm = ldub_code(s->pc++);
5363 reg = ((modrm >> 3) & 7) | rex_r;
5364 mod = (modrm >> 6) & 3;
5365 gen_setcc(s, b);
5366 if (mod != 3) {
5367 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5368 gen_op_ld_T1_A0[ot + s->mem_index]();
5369 } else {
5370 rm = (modrm & 7) | REX_B(s);
5371 gen_op_mov_TN_reg[ot][1][rm]();
5372 }
5373 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5374 break;
5375
5376 /************************/
5377 /* flags */
    case 0x9c: /* pushf */
#ifdef VBOX
        /* VBox: with CR4.VME a 16-bit pushf is allowed even when
           IOPL != 3 in vm86 mode (handled by the *_vme op below) */
        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            /* flags must be materialized before they can be read */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
#ifdef VBOX
            if (s->vm86 && s->vme && s->iopl != 3)
                gen_op_movl_T0_eflags_vme();
            else
#endif
            gen_op_movl_T0_eflags();
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
#ifdef VBOX
        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            /* which EFLAGS bits are writable depends on privilege:
               cpl0 > iopl-privileged > unprivileged variants */
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_op_movl_eflags_T0_cpl0();
                } else {
                    gen_op_movw_eflags_T0_cpl0();
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0_io();
                    } else {
                        gen_op_movw_eflags_T0_io();
                    }
                } else {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0();
                    } else {
#ifdef VBOX
                        if (s->vm86 && s->vme)
                            gen_op_movw_eflags_T0_vme();
                        else
#endif
                        gen_op_movw_eflags_T0();
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
5439 case 0x9e: /* sahf */
5440 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5441 goto illegal_op;
5442 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5443 if (s->cc_op != CC_OP_DYNAMIC)
5444 gen_op_set_cc_op(s->cc_op);
5445 gen_op_movb_eflags_T0();
5446 s->cc_op = CC_OP_EFLAGS;
5447 break;
5448 case 0x9f: /* lahf */
5449 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5450 goto illegal_op;
5451 if (s->cc_op != CC_OP_DYNAMIC)
5452 gen_op_set_cc_op(s->cc_op);
5453 gen_op_movl_T0_eflags();
5454 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5455 break;
5456 case 0xf5: /* cmc */
5457 if (s->cc_op != CC_OP_DYNAMIC)
5458 gen_op_set_cc_op(s->cc_op);
5459 gen_op_cmc();
5460 s->cc_op = CC_OP_EFLAGS;
5461 break;
5462 case 0xf8: /* clc */
5463 if (s->cc_op != CC_OP_DYNAMIC)
5464 gen_op_set_cc_op(s->cc_op);
5465 gen_op_clc();
5466 s->cc_op = CC_OP_EFLAGS;
5467 break;
5468 case 0xf9: /* stc */
5469 if (s->cc_op != CC_OP_DYNAMIC)
5470 gen_op_set_cc_op(s->cc_op);
5471 gen_op_stc();
5472 s->cc_op = CC_OP_EFLAGS;
5473 break;
5474 case 0xfc: /* cld */
5475 gen_op_cld();
5476 break;
5477 case 0xfd: /* std */
5478 gen_op_std();
5479 break;
5480
5481 /************************/
5482 /* bit operations */
5483 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5484 ot = dflag + OT_WORD;
5485 modrm = ldub_code(s->pc++);
5486 op = (modrm >> 3) & 7;
5487 mod = (modrm >> 6) & 3;
5488 rm = (modrm & 7) | REX_B(s);
5489 if (mod != 3) {
5490 s->rip_offset = 1;
5491 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5492 gen_op_ld_T0_A0[ot + s->mem_index]();
5493 } else {
5494 gen_op_mov_TN_reg[ot][0][rm]();
5495 }
5496 /* load shift */
5497 val = ldub_code(s->pc++);
5498 gen_op_movl_T1_im(val);
5499 if (op < 4)
5500 goto illegal_op;
5501 op -= 4;
5502 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5503 s->cc_op = CC_OP_SARB + ot;
5504 if (op != 0) {
5505 if (mod != 3)
5506 gen_op_st_T0_A0[ot + s->mem_index]();
5507 else
5508 gen_op_mov_reg_T0[ot][rm]();
5509 gen_op_update_bt_cc();
5510 }
5511 break;
5512 case 0x1a3: /* bt Gv, Ev */
5513 op = 0;
5514 goto do_btx;
5515 case 0x1ab: /* bts */
5516 op = 1;
5517 goto do_btx;
5518 case 0x1b3: /* btr */
5519 op = 2;
5520 goto do_btx;
5521 case 0x1bb: /* btc */
5522 op = 3;
5523 do_btx:
5524 ot = dflag + OT_WORD;
5525 modrm = ldub_code(s->pc++);
5526 reg = ((modrm >> 3) & 7) | rex_r;
5527 mod = (modrm >> 6) & 3;
5528 rm = (modrm & 7) | REX_B(s);
5529 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5530 if (mod != 3) {
5531 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5532 /* specific case: we need to add a displacement */
5533 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5534 gen_op_ld_T0_A0[ot + s->mem_index]();
5535 } else {
5536 gen_op_mov_TN_reg[ot][0][rm]();
5537 }
5538 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5539 s->cc_op = CC_OP_SARB + ot;
5540 if (op != 0) {
5541 if (mod != 3)
5542 gen_op_st_T0_A0[ot + s->mem_index]();
5543 else
5544 gen_op_mov_reg_T0[ot][rm]();
5545 gen_op_update_bt_cc();
5546 }
5547 break;
5548 case 0x1bc: /* bsf */
5549 case 0x1bd: /* bsr */
5550 ot = dflag + OT_WORD;
5551 modrm = ldub_code(s->pc++);
5552 reg = ((modrm >> 3) & 7) | rex_r;
5553 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5554 /* NOTE: in order to handle the 0 case, we must load the
5555 result. It could be optimized with a generated jump */
5556 gen_op_mov_TN_reg[ot][1][reg]();
5557 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5558 gen_op_mov_reg_T1[ot][reg]();
5559 s->cc_op = CC_OP_LOGICB + ot;
5560 break;
5561 /************************/
5562 /* bcd */
5563 case 0x27: /* daa */
5564 if (CODE64(s))
5565 goto illegal_op;
5566 if (s->cc_op != CC_OP_DYNAMIC)
5567 gen_op_set_cc_op(s->cc_op);
5568 gen_op_daa();
5569 s->cc_op = CC_OP_EFLAGS;
5570 break;
5571 case 0x2f: /* das */
5572 if (CODE64(s))
5573 goto illegal_op;
5574 if (s->cc_op != CC_OP_DYNAMIC)
5575 gen_op_set_cc_op(s->cc_op);
5576 gen_op_das();
5577 s->cc_op = CC_OP_EFLAGS;
5578 break;
5579 case 0x37: /* aaa */
5580 if (CODE64(s))
5581 goto illegal_op;
5582 if (s->cc_op != CC_OP_DYNAMIC)
5583 gen_op_set_cc_op(s->cc_op);
5584 gen_op_aaa();
5585 s->cc_op = CC_OP_EFLAGS;
5586 break;
5587 case 0x3f: /* aas */
5588 if (CODE64(s))
5589 goto illegal_op;
5590 if (s->cc_op != CC_OP_DYNAMIC)
5591 gen_op_set_cc_op(s->cc_op);
5592 gen_op_aas();
5593 s->cc_op = CC_OP_EFLAGS;
5594 break;
5595 case 0xd4: /* aam */
5596 if (CODE64(s))
5597 goto illegal_op;
5598 val = ldub_code(s->pc++);
5599 if (val == 0) {
5600 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5601 } else {
5602 gen_op_aam(val);
5603 s->cc_op = CC_OP_LOGICB;
5604 }
5605 break;
5606 case 0xd5: /* aad */
5607 if (CODE64(s))
5608 goto illegal_op;
5609 val = ldub_code(s->pc++);
5610 gen_op_aad(val);
5611 s->cc_op = CC_OP_LOGICB;
5612 break;
5613 /************************/
5614 /* misc */
5615 case 0x90: /* nop */
5616 /* XXX: xchg + rex handling */
5617 /* XXX: correct lock test for all insn */
5618 if (prefixes & PREFIX_LOCK)
5619 goto illegal_op;
5620 break;
5621 case 0x9b: /* fwait */
5622 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5623 (HF_MP_MASK | HF_TS_MASK)) {
5624 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5625 } else {
5626 if (s->cc_op != CC_OP_DYNAMIC)
5627 gen_op_set_cc_op(s->cc_op);
5628 gen_jmp_im(pc_start - s->cs_base);
5629 gen_op_fwait();
5630 }
5631 break;
    case 0xcc: /* int3 */
#ifdef VBOX
        /* VBox: in vm86 mode with IOPL != 3, int3 faults unless
           CR4.VME is set */
        if (s->vm86 && s->iopl != 3 && !s->vme) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else
#endif
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++); /* interrupt vector */
#ifdef VBOX
        /* VBox: same VME relaxation as int3 above */
        if (s->vm86 && s->iopl != 3 && !s->vme) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
5652 case 0xce: /* into */
5653 if (CODE64(s))
5654 goto illegal_op;
5655 if (s->cc_op != CC_OP_DYNAMIC)
5656 gen_op_set_cc_op(s->cc_op);
5657 gen_jmp_im(pc_start - s->cs_base);
5658 gen_op_into(s->pc - pc_start);
5659 break;
5660 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5661#if 1
5662 gen_debug(s, pc_start - s->cs_base);
5663#else
5664 /* start debug */
5665 tb_flush(cpu_single_env);
5666 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5667#endif
5668 break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            /* protected/real mode: allowed when CPL <= IOPL */
            if (s->cpl <= s->iopl) {
                gen_op_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                gen_op_cli();
#ifdef VBOX
            /* VBox: CR4.VME lets vm86 code clear VIF instead of faulting */
            } else if (s->iopl != 3 && s->vme) {
                gen_op_cli_vme();
#endif
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti: /* also reached from the vm86 IOPL==3 path below */
                gen_op_sti();
                /* interruptions are enabled only the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    gen_op_set_inhibit_irq();
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
#ifdef VBOX
            } else if (s->iopl != 3 && s->vme) {
                gen_op_sti_vme(); /* sets VIF rather than IF */
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
#endif
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
5719 case 0x62: /* bound */
5720 if (CODE64(s))
5721 goto illegal_op;
5722 ot = dflag ? OT_LONG : OT_WORD;
5723 modrm = ldub_code(s->pc++);
5724 reg = (modrm >> 3) & 7;
5725 mod = (modrm >> 6) & 3;
5726 if (mod == 3)
5727 goto illegal_op;
5728 gen_op_mov_TN_reg[ot][0][reg]();
5729 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5730 gen_jmp_im(pc_start - s->cs_base);
5731 if (ot == OT_WORD)
5732 gen_op_boundw();
5733 else
5734 gen_op_boundl();
5735 break;
5736 case 0x1c8 ... 0x1cf: /* bswap reg */
5737 reg = (b & 7) | REX_B(s);
5738#ifdef TARGET_X86_64
5739 if (dflag == 2) {
5740 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5741 gen_op_bswapq_T0();
5742 gen_op_mov_reg_T0[OT_QUAD][reg]();
5743 } else
5744#endif
5745 {
5746 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5747 gen_op_bswapl_T0();
5748 gen_op_mov_reg_T0[OT_LONG][reg]();
5749 }
5750 break;
5751 case 0xd6: /* salc */
5752 if (CODE64(s))
5753 goto illegal_op;
5754 if (s->cc_op != CC_OP_DYNAMIC)
5755 gen_op_set_cc_op(s->cc_op);
5756 gen_op_salc();
5757 break;
5758 case 0xe0: /* loopnz */
5759 case 0xe1: /* loopz */
5760 if (s->cc_op != CC_OP_DYNAMIC)
5761 gen_op_set_cc_op(s->cc_op);
5762 /* FALL THRU */
5763 case 0xe2: /* loop */
5764 case 0xe3: /* jecxz */
5765 {
5766 int l1, l2;
5767
5768 tval = (int8_t)insn_get(s, OT_BYTE);
5769 next_eip = s->pc - s->cs_base;
5770 tval += next_eip;
5771 if (s->dflag == 0)
5772 tval &= 0xffff;
5773
5774 l1 = gen_new_label();
5775 l2 = gen_new_label();
5776 b &= 3;
5777 if (b == 3) {
5778 gen_op_jz_ecx[s->aflag](l1);
5779 } else {
5780 gen_op_dec_ECX[s->aflag]();
5781 if (b <= 1)
5782 gen_op_mov_T0_cc();
5783 gen_op_loop[s->aflag][b](l1);
5784 }
5785
5786 gen_jmp_im(next_eip);
5787 gen_op_jmp_label(l2);
5788 gen_set_label(l1);
5789 gen_jmp_im(tval);
5790 gen_set_label(l2);
5791 gen_eob(s);
5792 }
5793 break;
5794 case 0x130: /* wrmsr */
5795 case 0x132: /* rdmsr */
5796 if (s->cpl != 0) {
5797 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5798 } else {
5799 if (b & 2)
5800 gen_op_rdmsr();
5801 else
5802 gen_op_wrmsr();
5803 }
5804 break;
5805 case 0x131: /* rdtsc */
5806 gen_jmp_im(pc_start - s->cs_base);
5807 gen_op_rdtsc();
5808 break;
5809 case 0x134: /* sysenter */
5810 if (CODE64(s))
5811 goto illegal_op;
5812 if (!s->pe) {
5813 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5814 } else {
5815 if (s->cc_op != CC_OP_DYNAMIC) {
5816 gen_op_set_cc_op(s->cc_op);
5817 s->cc_op = CC_OP_DYNAMIC;
5818 }
5819 gen_jmp_im(pc_start - s->cs_base);
5820 gen_op_sysenter();
5821 gen_eob(s);
5822 }
5823 break;
5824 case 0x135: /* sysexit */
5825 if (CODE64(s))
5826 goto illegal_op;
5827 if (!s->pe) {
5828 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5829 } else {
5830 if (s->cc_op != CC_OP_DYNAMIC) {
5831 gen_op_set_cc_op(s->cc_op);
5832 s->cc_op = CC_OP_DYNAMIC;
5833 }
5834 gen_jmp_im(pc_start - s->cs_base);
5835 gen_op_sysexit();
5836 gen_eob(s);
5837 }
5838 break;
5839#ifdef TARGET_X86_64
5840 case 0x105: /* syscall */
5841 /* XXX: is it usable in real mode ? */
5842 if (s->cc_op != CC_OP_DYNAMIC) {
5843 gen_op_set_cc_op(s->cc_op);
5844 s->cc_op = CC_OP_DYNAMIC;
5845 }
5846 gen_jmp_im(pc_start - s->cs_base);
5847 gen_op_syscall(s->pc - pc_start);
5848 gen_eob(s);
5849 break;
5850 case 0x107: /* sysret */
5851 if (!s->pe) {
5852 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5853 } else {
5854 if (s->cc_op != CC_OP_DYNAMIC) {
5855 gen_op_set_cc_op(s->cc_op);
5856 s->cc_op = CC_OP_DYNAMIC;
5857 }
5858 gen_jmp_im(pc_start - s->cs_base);
5859 gen_op_sysret(s->dflag);
5860 /* condition codes are modified only in long mode */
5861 if (s->lma)
5862 s->cc_op = CC_OP_EFLAGS;
5863 gen_eob(s);
5864 }
5865 break;
5866#endif
5867 case 0x1a2: /* cpuid */
5868 gen_op_cpuid();
5869 break;
5870 case 0xf4: /* hlt */
5871 if (s->cpl != 0) {
5872 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5873 } else {
5874 if (s->cc_op != CC_OP_DYNAMIC)
5875 gen_op_set_cc_op(s->cc_op);
5876 gen_jmp_im(s->pc - s->cs_base);
5877 gen_op_hlt();
5878 s->is_jmp = 3;
5879 }
5880 break;
5881 case 0x100:
5882 modrm = ldub_code(s->pc++);
5883 mod = (modrm >> 6) & 3;
5884 op = (modrm >> 3) & 7;
5885 switch(op) {
5886 case 0: /* sldt */
5887 if (!s->pe || s->vm86)
5888 goto illegal_op;
5889 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5890 ot = OT_WORD;
5891 if (mod == 3)
5892 ot += s->dflag;
5893 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5894 break;
5895 case 2: /* lldt */
5896 if (!s->pe || s->vm86)
5897 goto illegal_op;
5898 if (s->cpl != 0) {
5899 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5900 } else {
5901 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5902 gen_jmp_im(pc_start - s->cs_base);
5903 gen_op_lldt_T0();
5904 }
5905 break;
5906 case 1: /* str */
5907 if (!s->pe || s->vm86)
5908 goto illegal_op;
5909 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5910 ot = OT_WORD;
5911 if (mod == 3)
5912 ot += s->dflag;
5913 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5914 break;
5915 case 3: /* ltr */
5916 if (!s->pe || s->vm86)
5917 goto illegal_op;
5918 if (s->cpl != 0) {
5919 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5920 } else {
5921 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5922 gen_jmp_im(pc_start - s->cs_base);
5923 gen_op_ltr_T0();
5924 }
5925 break;
5926 case 4: /* verr */
5927 case 5: /* verw */
5928 if (!s->pe || s->vm86)
5929 goto illegal_op;
5930 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5931 if (s->cc_op != CC_OP_DYNAMIC)
5932 gen_op_set_cc_op(s->cc_op);
5933 if (op == 4)
5934 gen_op_verr();
5935 else
5936 gen_op_verw();
5937 s->cc_op = CC_OP_EFLAGS;
5938 break;
5939 default:
5940 goto illegal_op;
5941 }
5942 break;
5943 case 0x101:
5944 modrm = ldub_code(s->pc++);
5945 mod = (modrm >> 6) & 3;
5946 op = (modrm >> 3) & 7;
5947 rm = modrm & 7;
5948 switch(op) {
5949 case 0: /* sgdt */
5950 if (mod == 3)
5951 goto illegal_op;
5952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5953 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5954 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5955 gen_add_A0_im(s, 2);
5956 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5957 if (!s->dflag)
5958 gen_op_andl_T0_im(0xffffff);
5959 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5960 break;
5961 case 1:
5962 if (mod == 3) {
5963 switch (rm) {
5964 case 0: /* monitor */
5965 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5966 s->cpl != 0)
5967 goto illegal_op;
5968 gen_jmp_im(pc_start - s->cs_base);
5969#ifdef TARGET_X86_64
5970 if (s->aflag == 2) {
5971 gen_op_movq_A0_reg[R_EBX]();
5972 gen_op_addq_A0_AL();
5973 } else
5974#endif
5975 {
5976 gen_op_movl_A0_reg[R_EBX]();
5977 gen_op_addl_A0_AL();
5978 if (s->aflag == 0)
5979 gen_op_andl_A0_ffff();
5980 }
5981 gen_add_A0_ds_seg(s);
5982 gen_op_monitor();
5983 break;
5984 case 1: /* mwait */
5985 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5986 s->cpl != 0)
5987 goto illegal_op;
5988 if (s->cc_op != CC_OP_DYNAMIC) {
5989 gen_op_set_cc_op(s->cc_op);
5990 s->cc_op = CC_OP_DYNAMIC;
5991 }
5992 gen_jmp_im(s->pc - s->cs_base);
5993 gen_op_mwait();
5994 gen_eob(s);
5995 break;
5996 default:
5997 goto illegal_op;
5998 }
5999 } else { /* sidt */
6000 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6001 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6002 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
6003 gen_add_A0_im(s, 2);
6004 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6005 if (!s->dflag)
6006 gen_op_andl_T0_im(0xffffff);
6007 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6008 }
6009 break;
6010 case 2: /* lgdt */
6011 case 3: /* lidt */
6012 if (mod == 3)
6013 goto illegal_op;
6014 if (s->cpl != 0) {
6015 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6016 } else {
6017 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6018 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
6019 gen_add_A0_im(s, 2);
6020 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6021 if (!s->dflag)
6022 gen_op_andl_T0_im(0xffffff);
6023 if (op == 2) {
6024 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6025 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6026 } else {
6027 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6028 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6029 }
6030 }
6031 break;
6032 case 4: /* smsw */
6033 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6034 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6035 break;
6036 case 6: /* lmsw */
6037 if (s->cpl != 0) {
6038 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6039 } else {
6040 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6041 gen_op_lmsw_T0();
6042 gen_jmp_im(s->pc - s->cs_base);
6043 gen_eob(s);
6044 }
6045 break;
6046 case 7: /* invlpg */
6047 if (s->cpl != 0) {
6048 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6049 } else {
6050 if (mod == 3) {
6051#ifdef TARGET_X86_64
6052 if (CODE64(s) && rm == 0) {
6053 /* swapgs */
6054 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6055 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6056 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6057 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6058 } else
6059#endif
6060 {
6061 goto illegal_op;
6062 }
6063 } else {
6064 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6065 gen_op_invlpg_A0();
6066 gen_jmp_im(s->pc - s->cs_base);
6067 gen_eob(s);
6068 }
6069 }
6070 break;
6071 default:
6072 goto illegal_op;
6073 }
6074 break;
6075 case 0x108: /* invd */
6076 case 0x109: /* wbinvd */
6077 if (s->cpl != 0) {
6078 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6079 } else {
6080 /* nothing to do */
6081 }
6082 break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            /* in 64-bit mode 0x63 is movsxd: load a 32-bit value and
               sign-extend into the destination register */
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0[d_ot][reg]();
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index](); /* sign-extending load */
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            }
        } else
#endif
        {
            /* arpl: only valid in protected mode outside vm86 */
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
            /* the source selector (whose RPL is compared against) goes
               in T1; the upstream code left T1 unset here */
            gen_op_mov_TN_reg[ot][1][reg]();
#endif
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[ot + s->mem_index]();
            } else {
                gen_op_mov_TN_reg[ot][0][rm]();
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS; /* arpl sets ZF */
            if (mod != 3) {
                gen_op_st_T0_A0[ot + s->mem_index](); /* write adjusted selector back */
            } else {
                gen_op_mov_reg_T0[ot][rm]();
            }
            gen_op_arpl_update();
        }
        break;
6141 case 0x102: /* lar */
6142 case 0x103: /* lsl */
6143 if (!s->pe || s->vm86)
6144 goto illegal_op;
6145 ot = dflag ? OT_LONG : OT_WORD;
6146 modrm = ldub_code(s->pc++);
6147 reg = ((modrm >> 3) & 7) | rex_r;
6148 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6149 gen_op_mov_TN_reg[ot][1][reg]();
6150 if (s->cc_op != CC_OP_DYNAMIC)
6151 gen_op_set_cc_op(s->cc_op);
6152 if (b == 0x102)
6153 gen_op_lar();
6154 else
6155 gen_op_lsl();
6156 s->cc_op = CC_OP_EFLAGS;
6157 gen_op_mov_reg_T1[ot][reg]();
6158 break;
6159 case 0x118:
6160 modrm = ldub_code(s->pc++);
6161 mod = (modrm >> 6) & 3;
6162 op = (modrm >> 3) & 7;
6163 switch(op) {
6164 case 0: /* prefetchnta */
6165 case 1: /* prefetchnt0 */
6166 case 2: /* prefetchnt0 */
6167 case 3: /* prefetchnt0 */
6168 if (mod == 3)
6169 goto illegal_op;
6170 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6171 /* nothing more to do */
6172 break;
6173 default: /* nop (multi byte) */
6174 gen_nop_modrm(s, modrm);
6175 break;
6176 }
6177 break;
6178 case 0x119 ... 0x11f: /* nop (multi byte) */
6179 modrm = ldub_code(s->pc++);
6180 gen_nop_modrm(s, modrm);
6181 break;
6182 case 0x120: /* mov reg, crN */
6183 case 0x122: /* mov crN, reg */
6184 if (s->cpl != 0) {
6185 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6186 } else {
6187 modrm = ldub_code(s->pc++);
6188 if ((modrm & 0xc0) != 0xc0)
6189 goto illegal_op;
6190 rm = (modrm & 7) | REX_B(s);
6191 reg = ((modrm >> 3) & 7) | rex_r;
6192 if (CODE64(s))
6193 ot = OT_QUAD;
6194 else
6195 ot = OT_LONG;
6196 switch(reg) {
6197 case 0:
6198 case 2:
6199 case 3:
6200 case 4:
6201 case 8:
6202 if (b & 2) {
6203 gen_op_mov_TN_reg[ot][0][rm]();
6204 gen_op_movl_crN_T0(reg);
6205 gen_jmp_im(s->pc - s->cs_base);
6206 gen_eob(s);
6207 } else {
6208#if !defined(CONFIG_USER_ONLY)
6209 if (reg == 8)
6210 gen_op_movtl_T0_cr8();
6211 else
6212#endif
6213 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6214 gen_op_mov_reg_T0[ot][rm]();
6215 }
6216 break;
6217 default:
6218 goto illegal_op;
6219 }
6220 }
6221 break;
6222 case 0x121: /* mov reg, drN */
6223 case 0x123: /* mov drN, reg */
6224 if (s->cpl != 0) {
6225 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6226 } else {
6227 modrm = ldub_code(s->pc++);
6228 if ((modrm & 0xc0) != 0xc0)
6229 goto illegal_op;
6230 rm = (modrm & 7) | REX_B(s);
6231 reg = ((modrm >> 3) & 7) | rex_r;
6232 if (CODE64(s))
6233 ot = OT_QUAD;
6234 else
6235 ot = OT_LONG;
6236 /* XXX: do it dynamically with CR4.DE bit */
6237 if (reg == 4 || reg == 5 || reg >= 8)
6238 goto illegal_op;
6239 if (b & 2) {
6240 gen_op_mov_TN_reg[ot][0][rm]();
6241 gen_op_movl_drN_T0(reg);
6242 gen_jmp_im(s->pc - s->cs_base);
6243 gen_eob(s);
6244 } else {
6245 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6246 gen_op_mov_reg_T0[ot][rm]();
6247 }
6248 }
6249 break;
6250 case 0x106: /* clts */
6251 if (s->cpl != 0) {
6252 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6253 } else {
6254 gen_op_clts();
6255 /* abort block because static cpu state changed */
6256 gen_jmp_im(s->pc - s->cs_base);
6257 gen_eob(s);
6258 }
6259 break;
6260 /* MMX/SSE/SSE2/PNI support */
6261 case 0x1c3: /* MOVNTI reg, mem */
6262 if (!(s->cpuid_features & CPUID_SSE2))
6263 goto illegal_op;
6264 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6265 modrm = ldub_code(s->pc++);
6266 mod = (modrm >> 6) & 3;
6267 if (mod == 3)
6268 goto illegal_op;
6269 reg = ((modrm >> 3) & 7) | rex_r;
6270 /* generate a generic store */
6271 gen_ldst_modrm(s, modrm, ot, reg, 1);
6272 break;
6273 case 0x1ae:
6274 modrm = ldub_code(s->pc++);
6275 mod = (modrm >> 6) & 3;
6276 op = (modrm >> 3) & 7;
6277 switch(op) {
6278 case 0: /* fxsave */
6279 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6280 (s->flags & HF_EM_MASK))
6281 goto illegal_op;
6282 if (s->flags & HF_TS_MASK) {
6283 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6284 break;
6285 }
6286 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6287 gen_op_fxsave_A0((s->dflag == 2));
6288 break;
6289 case 1: /* fxrstor */
6290 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6291 (s->flags & HF_EM_MASK))
6292 goto illegal_op;
6293 if (s->flags & HF_TS_MASK) {
6294 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6295 break;
6296 }
6297 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6298 gen_op_fxrstor_A0((s->dflag == 2));
6299 break;
6300 case 2: /* ldmxcsr */
6301 case 3: /* stmxcsr */
6302 if (s->flags & HF_TS_MASK) {
6303 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6304 break;
6305 }
6306 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6307 mod == 3)
6308 goto illegal_op;
6309 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6310 if (op == 2) {
6311 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6312 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6313 } else {
6314 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6315 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6316 }
6317 break;
6318 case 5: /* lfence */
6319 case 6: /* mfence */
6320 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6321 goto illegal_op;
6322 break;
6323 case 7: /* sfence / clflush */
6324 if ((modrm & 0xc7) == 0xc0) {
6325 /* sfence */
6326 if (!(s->cpuid_features & CPUID_SSE))
6327 goto illegal_op;
6328 } else {
6329 /* clflush */
6330 if (!(s->cpuid_features & CPUID_CLFLUSH))
6331 goto illegal_op;
6332 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6333 }
6334 break;
6335 default:
6336 goto illegal_op;
6337 }
6338 break;
6339 case 0x10d: /* prefetch */
6340 modrm = ldub_code(s->pc++);
6341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6342 /* ignore for now */
6343 break;
6344 case 0x1aa: /* rsm */
6345 if (!(s->flags & HF_SMM_MASK))
6346 goto illegal_op;
6347 if (s->cc_op != CC_OP_DYNAMIC) {
6348 gen_op_set_cc_op(s->cc_op);
6349 s->cc_op = CC_OP_DYNAMIC;
6350 }
6351 gen_jmp_im(s->pc - s->cs_base);
6352 gen_op_rsm();
6353 gen_eob(s);
6354 break;
6355 case 0x110 ... 0x117:
6356 case 0x128 ... 0x12f:
6357 case 0x150 ... 0x177:
6358 case 0x17c ... 0x17f:
6359 case 0x1c2:
6360 case 0x1c4 ... 0x1c6:
6361 case 0x1d0 ... 0x1fe:
6362 gen_sse(s, b, pc_start, rex_r);
6363 break;
6364 default:
6365 goto illegal_op;
6366 }
6367 /* lock generation */
6368 if (s->prefix & PREFIX_LOCK)
6369 gen_op_unlock();
6370 return s->pc;
6371 illegal_op:
6372 if (s->prefix & PREFIX_LOCK)
6373 gen_op_unlock();
6374 /* XXX: ensure that no lock was generated */
6375 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6376 return s->pc;
6377}
6378
/* Convenience masks over the individual CC_* flag bits: all six
   arithmetic status flags (O,S,Z,A,P,C), and the same set minus carry. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6381
/* Flags READ by each micro-operation, indexed by INDEX_op_*.
   Consulted by optimize_flags() when walking a translation block
   backwards: an op's entry is OR-ed into the live-flag set.  Ops
   absent from the table read no flags (designated initializers leave
   the remaining entries zero). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the incoming carry; the macro is expanded
   once per memory-access variant suffix below. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6514
/* Flags WRITTEN (clobbered or defined) by each micro-operation,
   indexed by INDEX_op_*.  optimize_flags() removes these bits from the
   live set when stepping backwards over the op; ops not listed write
   no flags. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* byte-sized eflags load cannot touch O (it lives above bit 7) */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Entries for the ops that exist in one variant per memory-access
   suffix; expanded once per suffix below. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6665
6666/* simpler form of an operation if no flags need to be generated */
6667static uint16_t opc_simpler[NB_OPS] = {
6668 [INDEX_op_update2_cc] = INDEX_op_nop,
6669 [INDEX_op_update1_cc] = INDEX_op_nop,
6670 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6671#if 0
6672 /* broken: CC_OP logic must be rewritten */
6673 [INDEX_op_update_inc_cc] = INDEX_op_nop,
6674#endif
6675
6676 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6677 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6678 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6679 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6680
6681 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6682 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6683 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6684 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6685
6686 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6687 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6688 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6689 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6690
6691#define DEF_SIMPLER(SUFFIX)\
6692 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6693 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6694 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6695 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6696\
6697 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6698 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6699 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6700 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6701
6702 DEF_SIMPLER( )
6703 DEF_SIMPLER(_raw)
6704#ifndef CONFIG_USER_ONLY
6705 DEF_SIMPLER(_kernel)
6706 DEF_SIMPLER(_user)
6707#endif
6708};
6709
6710void optimize_flags_init(void)
6711{
6712 int i;
6713 /* put default values in arrays */
6714 for(i = 0; i < NB_OPS; i++) {
6715 if (opc_simpler[i] == 0)
6716 opc_simpler[i] = i;
6717 }
6718}
6719
6720/* CPU flags computation optimization: we move backward thru the
6721 generated code to see which flags are needed. The operation is
6722 modified if suitable */
6723static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6724{
6725 uint16_t *opc_ptr;
6726 int live_flags, write_flags, op;
6727
6728 opc_ptr = opc_buf + opc_buf_len;
6729 /* live_flags contains the flags needed by the next instructions
6730 in the code. At the end of the bloc, we consider that all the
6731 flags are live. */
6732 live_flags = CC_OSZAPC;
6733 while (opc_ptr > opc_buf) {
6734 op = *--opc_ptr;
6735 /* if none of the flags written by the instruction is used,
6736 then we can try to find a simpler instruction */
6737 write_flags = opc_write_flags[op];
6738 if ((live_flags & write_flags) == 0) {
6739 *opc_ptr = opc_simpler[op];
6740 }
6741 /* compute the live flags before the instruction */
6742 live_flags &= ~write_flags;
6743 live_flags |= opc_read_flags[op];
6744 }
6745}
6746
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest PC after a fault in translated code).  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the static CPU state baked into tb->flags into the
       per-block disassembly context. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX_WITH_CALL_RECORD
    /* VBox: record guest calls only for paged, IF-clear, 32-bit,
       non-raw-ring0 code (conditions checked directly against env). */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    if (    !(env->state & CPU_RAW_RING0)
        &&  (env->cr[0] & CR0_PG_MASK)
        &&  !(env->eflags & X86_EFL_IF)
        &&  dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: mem_index picks the op-table row
       (0 = user-mode/direct, 1*4 = kernel, 2*4 = cpl3 softmmu) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is disabled whenever precise control is
       needed (single-step, trap flag, inhibited irqs, ...) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the global op/param output buffers for this block */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;   /* index of last op with recorded PC info (search_pc) */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC / cc_op for every op emitted so far;
               ops between instruction starts get instr_start = 0 */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox: one-shot single-instruction emulation request — clear
           the flag and end the block after this instruction */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations (backward liveness over the op buffer) */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* in search_pc mode the tb already has its final size */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6940
6941int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6942{
6943 return gen_intermediate_code_internal(env, tb, 0);
6944}
6945
6946int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6947{
6948 return gen_intermediate_code_internal(env, tb, 1);
6949}
6950
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette