- Timestamp: May 23, 2017 11:31:06 AM (7 years ago)
- Location: trunk
- Files: 5 edited
  - include/VBox/disopcode.h (modified) (2 diffs)
  - src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (modified) (1 diff)
  - src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h (modified) (3 diffs)
  - src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1-template.c (modified) (1 diff)
  - src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1.h (modified) (2 diffs)
trunk/include/VBox/disopcode.h (r67034 → r67037)

     OP_VMOVDQA,
     OP_VMOVDQU,
+    OP_VMOVNTDQ,
     /** @} */
     OP_END_OF_OPCODES
…
 #define OP_PARM_Mps_WO    OP_PARM_Mps   /**< Annotates write only memory operand. */
 #define OP_PARM_Mpd_WO    OP_PARM_Mpd   /**< Annotates write only memory operand. */
+#define OP_PARM_Mx_WO     OP_PARM_Mx    /**< Annotates write only memory operand. */
 #define OP_PARM_PdZx_WO   OP_PARM_Pd    /**< Annotates write only operand and zero extends to 64-bit. */
 #define OP_PARM_Pq_WO     OP_PARM_Pq    /**< Annotates write only operand. */
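The new OP_VMOVNTDQ opcode value and the OP_PARM_Mx_WO annotation describe the AVX form of the non-temporal packed-integer store. As a rough, guest-side illustration of what that instruction does (plain Intel intrinsics, not VirtualBox code; the helper name below is made up for the example):

/*
 * vmovntdq performs a cache-bypassing ("non-temporal") store of packed
 * integer data to an aligned memory operand.  _mm_stream_si128 typically
 * compiles to (v)movntdq when SSE2/AVX code generation is enabled.
 */
#include <immintrin.h>
#include <stddef.h>

static void fill_nontemporal(__m128i *pDst /* must be 16-byte aligned */, size_t cItems, __m128i uValue)
{
    for (size_t i = 0; i < cItems; i++)
        _mm_stream_si128(&pDst[i], uValue); /* non-temporal 128-bit store */
    _mm_sfence();                           /* order the streamed stores before later stores */
}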
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (r67034 → r67037)

     'Mps_WO':   ( 'IDX_UseModRM', 'rm', '%Mps', 'Mps', ),
     'Mpd_WO':   ( 'IDX_UseModRM', 'rm', '%Mpd', 'Mpd', ),
+    'Mx_WO':    ( 'IDX_UseModRM', 'rm', '%Mx',  'Mx',  ),
     'M_RO':     ( 'IDX_UseModRM', 'rm', '%M',   'M',   ),
     'M_RW':     ( 'IDX_UseModRM', 'rm', '%M',   'M',   ),
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h (r67033 → r67037)

     {
         /*
-         * 128-bit: Memory, register.
+         * 128-bit: Register, Memory
          */
         IEM_MC_BEGIN(0, 2);
…
     {
         /*
-         * 256-bit: Memory, register.
+         * 256-bit: Register, Memory
          */
         IEM_MC_BEGIN(0, 2);
…
 /*  Opcode VEX.0F 0xe7 - invalid */

-/** Opcode VEX.66.0F 0xe7 - vmovntdq Mx, Vx */
-FNIEMOP_STUB(iemOp_vmovntdq_Mx_Vx);
-//FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
-//{
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /* Register, memory. */
-//        IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
-//        IEM_MC_BEGIN(0, 2);
-//        IEM_MC_LOCAL(RTUINT128U, uSrc);
-//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-//
-//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//        return VINF_SUCCESS;
-//    }
-//
-//    /* The register, register encoding is invalid. */
-//    return IEMOP_RAISE_INVALID_OPCODE();
-//}
+/**
+ * @opcode      0xe7
+ * @opcodesub   !11 mr/reg
+ * @oppfx       0x66
+ * @opcpuid     avx
+ * @opgroup     og_avx_cachect
+ * @opxcpttype  1
+ * @optest      op1=-1 op2=2   -> op1=2
+ * @optest      op1=0  op2=-42 -> op1=-42
+ */
+FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
+{
+    IEMOP_MNEMONIC2(VEX_MR, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
+    Assert(pVCpu->iem.s.uVexLength <= 1);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
+    {
+        if (pVCpu->iem.s.uVexLength == 0)
+        {
+            /*
+             * 128-bit: Memory, register.
+             */
+            IEM_MC_BEGIN(0, 2);
+            IEM_MC_LOCAL(RTUINT128U, uSrc);
+            IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+            IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            /*
+             * 256-bit: Memory, register.
+             */
+            IEM_MC_BEGIN(0, 2);
+            IEM_MC_LOCAL(RTUINT256U, uSrc);
+            IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+            IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        return VINF_SUCCESS;
+    }
+    /**
+     * @opdone
+     * @opmnemonic  udvex660fe7reg
+     * @opcode      0xe7
+     * @opcodesub   11 mr/reg
+     * @oppfx       0x66
+     * @opunused    immediate
+     * @opcpuid     avx
+     * @optest      ->
+     */
+    return IEMOP_RAISE_INVALID_OPCODE();
+}

 /* Opcode VEX.F3.0F 0xe7 - invalid */
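The rewritten handler above accepts only the memory destination form (ModRM.mod != 3) and selects the store width from VEX.L: 16 bytes for VEX.128, 32 bytes for VEX.256, both through aligned-store microcode helpers. A minimal standalone sketch of just those two decode decisions, with the ModRM mask written out locally instead of using the VBox X86_MODRM_* constants and a hypothetical function name:

/*
 * Sketch only: mirrors the mod-field and VEX.L checks in the handler above;
 * this is not IEM code and the mask is redefined locally.
 */
#include <stdint.h>
#include <stdbool.h>

#define MODRM_MOD(bRm)  (((bRm) >> 6) & 3)   /* 3 means register-direct operand */

static bool vmovntdqIsValidForm(uint8_t bRm, unsigned uVexLength, unsigned *pcbStore)
{
    if (MODRM_MOD(bRm) == 3)
        return false;                 /* reg,reg encoding: raise invalid opcode (#UD) */
    *pcbStore = uVexLength ? 32 : 16; /* VEX.L=1 selects the 256-bit store, VEX.L=0 the 128-bit one */
    return true;
}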
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1-template.c (r67034 → r67037)

         case BS3CG1ENC_VEX_MODRM_Mps_WO_Vps:
         case BS3CG1ENC_VEX_MODRM_Mpd_WO_Vpd:
+        case BS3CG1ENC_VEX_MODRM_Mx_WO_Vx:
             pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_OR_ViceVersa;
             pThis->iRmOp = 0;
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1.h (r67034 → r67037)

     BS3CG1OP_Mps_WO,
     BS3CG1OP_Mpd_WO,
+    BS3CG1OP_Mx_WO,

     BS3CG1OP_END
…
     BS3CG1ENC_VEX_MODRM_Mps_WO_Vps,
     BS3CG1ENC_VEX_MODRM_Mpd_WO_Vpd,
+    BS3CG1ENC_VEX_MODRM_Mx_WO_Vx,
     BS3CG1ENC_VEX_MODRM_Uss_WO_HssHi_Vss,
     BS3CG1ENC_VEX_MODRM_Usd_WO_HsdHi_Vsd,