- Timestamp:
- May 22, 2017 12:26:25 PM (7 years ago)
- Location:
- trunk
- Files: 6 edited
include/VBox/disopcode.h (modified) (1 diff)
-
src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (modified) (2 diffs)
-
src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (modified) (1 diff)
-
src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h (modified) (1 diff)
-
src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1-template.c (modified) (1 diff)
-
src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1.h (modified) (3 diffs)
Legend:
- Unmodified
- Added
- Removed
-
trunk/include/VBox/disopcode.h
r67010 r67012 1082 1082 1083 1083 /* For making IEM / bs3-cpu-generated-1 happy: */ 1084 #define OP_PARM_Ed_WO OP_PARM_Ed /**< Annotates write only operand. */ 1084 1085 #define OP_PARM_Eq (OP_PARM_E+OP_PARM_q) 1086 #define OP_PARM_Eq_WO OP_PARM_Eq /**< Annotates write only operand. */ 1085 1087 #define OP_PARM_Gv_RO OP_PARM_Gv /**< Annotates read only first operand (default is readwrite). */ 1086 1088 #define OP_PARM_HssHi OP_PARM_Hx /**< Register referenced by VEX.vvvv, bits [127:32]. */ -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
r67007 r67012 215 215 'Eb': ( 'IDX_UseModRM', 'rm', '%Eb', 'Eb', ), 216 216 'Ed': ( 'IDX_UseModRM', 'rm', '%Ed', 'Ed', ), 217 'Ed_WO': ( 'IDX_UseModRM', 'rm', '%Ed', 'Ed', ), 217 218 'Eq': ( 'IDX_UseModRM', 'rm', '%Eq', 'Eq', ), 219 'Eq_WO': ( 'IDX_UseModRM', 'rm', '%Eq', 'Eq', ), 218 220 'Ew': ( 'IDX_UseModRM', 'rm', '%Ew', 'Ew', ), 219 221 'Ev': ( 'IDX_UseModRM', 'rm', '%Ev', 'Ev', ), … … 265 267 'Pq': ( 'IDX_UseModRM', 'reg', '%Pq', 'Pq', ), 266 268 'Pq_WO': ( 'IDX_UseModRM', 'reg', '%Pq', 'Pq', ), 269 'Vd': ( 'IDX_UseModRM', 'reg', '%Vd', 'Vd', ), 267 270 'Vd_WO': ( 'IDX_UseModRM', 'reg', '%Vd', 'Vd', ), 268 271 'VdZx_WO': ( 'IDX_UseModRM', 'reg', '%Vd', 'Vd', ), -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
r67011 r67012 3782 3782 * @optest op1=1 op2=2 -> op1=2 3783 3783 * @optest op1=0 op2=-42 -> op1=-42 3784 * @oponly3785 3784 */ 3786 3785 FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq) -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
r67010 r67012 2735 2735 /* Opcode VEX.0F 0x7e - invalid */ 2736 2736 2737 /** Opcode VEX.66.0F 0x7e - vmovd_q Ey, Vy */ 2738 FNIEMOP_STUB(iemOp_vmovd_q_Ey_Vy); 2739 //FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy) 2740 //{ 2741 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2742 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2743 // IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq"); 2744 // else 2745 // IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd"); 2746 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2747 // { 2748 // /* greg, XMM */ 2749 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2750 // IEM_MC_BEGIN(0, 1); 2751 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2752 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2753 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2754 // { 2755 // IEM_MC_LOCAL(uint64_t, u64Tmp); 2756 // IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2757 // IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 2758 // } 2759 // else 2760 // { 2761 // IEM_MC_LOCAL(uint32_t, u32Tmp); 2762 // IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2763 // IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 2764 // } 2765 // IEM_MC_ADVANCE_RIP(); 2766 // IEM_MC_END(); 2767 // } 2768 // else 2769 // { 2770 // /* [mem], XMM */ 2771 // IEM_MC_BEGIN(0, 2); 2772 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2773 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2774 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 2775 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2776 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2777 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2778 // { 2779 // IEM_MC_LOCAL(uint64_t, u64Tmp); 2780 // IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2781 // IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 2782 
// } 2783 // else 2784 // { 2785 // IEM_MC_LOCAL(uint32_t, u32Tmp); 2786 // IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2787 // IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 2788 // } 2789 // IEM_MC_ADVANCE_RIP(); 2790 // IEM_MC_END(); 2791 // } 2792 // return VINF_SUCCESS; 2793 //} 2737 FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy) 2738 { 2739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2741 { 2742 /** 2743 * @opcode 0x7e 2744 * @opcodesub rex.w=1 2745 * @oppfx 0x66 2746 * @opcpuid avx 2747 * @opgroup og_avx_simdint_datamov 2748 * @opxcpttype 5 2749 * @optest 64-bit / op1=1 op2=2 -> op1=2 2750 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 2751 * @oponly 2752 */ 2753 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 2754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2755 { 2756 /* greg64, XMM */ 2757 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV(); 2758 IEM_MC_BEGIN(0, 1); 2759 IEM_MC_LOCAL(uint64_t, u64Tmp); 2760 2761 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 2762 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ(); 2763 2764 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2765 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 2766 2767 IEM_MC_ADVANCE_RIP(); 2768 IEM_MC_END(); 2769 } 2770 else 2771 { 2772 /* [mem64], XMM */ 2773 IEM_MC_BEGIN(0, 2); 2774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2775 IEM_MC_LOCAL(uint64_t, u64Tmp); 2776 2777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2778 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV(); 2779 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 2780 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ(); 2781 2782 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2783 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 2784 2785 
IEM_MC_ADVANCE_RIP(); 2786 IEM_MC_END(); 2787 } 2788 } 2789 else 2790 { 2791 /** 2792 * @opdone 2793 * @opcode 0x7e 2794 * @opcodesub rex.w=0 2795 * @oppfx 0x66 2796 * @opcpuid avx 2797 * @opgroup og_avx_simdint_datamov 2798 * @opxcpttype 5 2799 * @opfunction iemOp_vmovd_q_Vy_Ey 2800 * @optest op1=1 op2=2 -> op1=2 2801 * @optest op1=0 op2=-42 -> op1=-42 2802 * @oponly 2803 */ 2804 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 2805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2806 { 2807 /* greg32, XMM */ 2808 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV(); 2809 IEM_MC_BEGIN(0, 1); 2810 IEM_MC_LOCAL(uint32_t, u32Tmp); 2811 2812 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 2813 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ(); 2814 2815 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2816 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 2817 2818 IEM_MC_ADVANCE_RIP(); 2819 IEM_MC_END(); 2820 } 2821 else 2822 { 2823 /* [mem32], XMM */ 2824 IEM_MC_BEGIN(0, 2); 2825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2826 IEM_MC_LOCAL(uint32_t, u32Tmp); 2827 2828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2829 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV(); 2830 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 2831 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ(); 2832 2833 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2834 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 2835 2836 IEM_MC_ADVANCE_RIP(); 2837 IEM_MC_END(); 2838 } 2839 } 2840 return VINF_SUCCESS; 2841 } 2794 2842 2795 2843 /** Opcode VEX.F3.0F 0x7e - vmovq Vq, Wq */ -
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1-template.c
r67009 r67012 4573 4573 break; 4574 4574 4575 case BS3CG1ENC_VEX_MODRM_Ed_WO_Vd_WZ: 4576 pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_Vd_WO_Ed_WZ; 4577 pThis->iRmOp = 0; 4578 pThis->iRegOp = 1; 4579 pThis->aOperands[0].cbOp = 4; 4580 pThis->aOperands[1].cbOp = 4; 4581 pThis->aOperands[0].idxFieldBase = BS3CG1DST_EAX; 4582 pThis->aOperands[1].idxFieldBase = BS3CG1DST_XMM0_DW0_ZX; 4583 pThis->aOperands[0].enmLocation = BS3CG1OPLOC_CTX; 4584 pThis->aOperands[0].enmLocationReg = BS3CG1OPLOC_CTX; 4585 pThis->aOperands[0].enmLocationMem = BS3CG1OPLOC_MEM_WO; 4586 pThis->aOperands[1].enmLocation = BS3CG1OPLOC_CTX; 4587 break; 4588 4589 case BS3CG1ENC_VEX_MODRM_Eq_WO_Vq_WNZ: 4590 pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_Vq_WO_Eq_WNZ; 4591 pThis->iRmOp = 0; 4592 pThis->iRegOp = 1; 4593 pThis->aOperands[0].cbOp = 8; 4594 pThis->aOperands[1].cbOp = 8; 4595 pThis->aOperands[0].idxFieldBase = BS3CG1DST_RAX; 4596 pThis->aOperands[1].idxFieldBase = BS3CG1DST_XMM0_LO_ZX; 4597 pThis->aOperands[0].enmLocation = BS3CG1OPLOC_CTX; 4598 pThis->aOperands[0].enmLocationReg = BS3CG1OPLOC_CTX; 4599 pThis->aOperands[0].enmLocationMem = BS3CG1OPLOC_MEM_WO; 4600 pThis->aOperands[1].enmLocation = BS3CG1OPLOC_CTX; 4601 break; 4602 4575 4603 case BS3CG1ENC_VEX_MODRM_Md_WO: 4576 4604 pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_Md_WO; -
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1.h
r67007 r67012 44 44 BS3CG1OP_Eb, 45 45 BS3CG1OP_Ed, 46 BS3CG1OP_Ed_WO, 46 47 BS3CG1OP_Eq, 48 BS3CG1OP_Eq_WO, 47 49 BS3CG1OP_Ev, 48 50 BS3CG1OP_Qq, … … 80 82 BS3CG1OP_Usd, 81 83 BS3CG1OP_Usd_WO, 84 BS3CG1OP_Vd, 82 85 BS3CG1OP_Vd_WO, 83 86 BS3CG1OP_VdZx_WO, … … 180 183 BS3CG1ENC_VEX_MODRM_VsdZx_WO_Mq, 181 184 BS3CG1ENC_VEX_MODRM_Vx_WO_Wx, 185 BS3CG1ENC_VEX_MODRM_Ed_WO_Vd_WZ, 186 BS3CG1ENC_VEX_MODRM_Eq_WO_Vq_WNZ, 182 187 BS3CG1ENC_VEX_MODRM_Md_WO, 183 188 BS3CG1ENC_VEX_MODRM_Mq_WO_Vq,
Note: See TracChangeset for help on using the changeset viewer.

