- Timestamp:
- Mar 4, 2024 11:25:11 AM (7 months ago)
- Location:
- trunk/src/VBox/VMM
- Files:
-     4 edited:
-       VMMAll/IEMAllInstPython.py (modified) (1 diff)
-       VMMAll/IEMAllN8veLiveness.cpp (modified) (2 diffs)
-       VMMAll/IEMAllN8veRecompiler.cpp (modified) (2 diffs)
-       include/IEMN8veRecompiler.h (modified) (3 diffs)
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r103613 → r103660 (1 hunk, lines 3123-3127) — IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT is now marked as used by the native recompiler (last tuple flag False → True):

   3123   'IEM_MC_LOCAL_EFLAGS':                     (McBlock.parseMcLocalEFlags, True,  True,  True,  ),
   3124   'IEM_MC_NOREF':                            (McBlock.parseMcGeneric,     False, False, True,  ),
 - 3125   'IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT':     (McBlock.parseMcGeneric,     True,  True,  False, ),
 + 3125   'IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT':     (McBlock.parseMcGeneric,     True,  True,  True,  ),
   3126   'IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE': (McBlock.parseMcGeneric,     True,  True,  True,  ),
   3127   'IEM_MC_MAYBE_RAISE_FPU_XCPT':             (McBlock.parseMcGeneric,     True,  True,  False, ),
-
trunk/src/VBox/VMM/VMMAll/IEMAllN8veLiveness.cpp
r103625 → r103660 (2 hunks)

Hunk 1 (around line 180) — add an XCR0 liveness input macro (existing CR0/CR4 lines only re-aligned):

 - #define IEM_LIVENESS_CR0_INPUT()  IEM_LIVENESS_BITMAP_MEMBER_INPUT( fCr0, 0)
 - #define IEM_LIVENESS_CR4_INPUT()  IEM_LIVENESS_BITMAP_MEMBER_INPUT( fCr4, 0)
 + #define IEM_LIVENESS_CR0_INPUT()   IEM_LIVENESS_BITMAP_MEMBER_INPUT( fCr0,  0)
 + #define IEM_LIVENESS_CR4_INPUT()   IEM_LIVENESS_BITMAP_MEMBER_INPUT( fCr4,  0)
 + #define IEM_LIVENESS_XCR0_INPUT()  IEM_LIVENESS_BITMAP_MEMBER_INPUT( fXcr0, 0)

Hunk 2 (around line 450) — IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT now also marks XCR0 as an input (previously commented out):

   #define IEM_MC_MAYBE_RAISE_FPU_XCPT()  IEM_LIVENESS_MARK_XCPT_OR_CALL(); IEM_LIVENESS_FSW_INPUT()
   #define IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() \
 -     IEM_LIVENESS_MARK_XCPT_OR_CALL(); IEM_LIVENESS_CR0_INPUT(); IEM_LIVENESS_CR4_INPUT() //; IEM_LIVENESS_XCR0_INPUT()
 +     IEM_LIVENESS_MARK_XCPT_OR_CALL(); IEM_LIVENESS_CR0_INPUT(); IEM_LIVENESS_CR4_INPUT(); IEM_LIVENESS_XCR0_INPUT()
   #define IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() \
       IEM_LIVENESS_MARK_XCPT_OR_CALL(); IEM_LIVENESS_CR0_INPUT(); IEM_LIVENESS_CR4_INPUT()
-
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r103659 → r103660 (2 hunks)

Hunk 1 (around line 3569) — add XCR0 to the guest-register info table:

   /* [kIemNativeGstReg_SegSelFirst + 5] = */ { CPUMCTX_OFF_AND_SIZE(aSRegs[5].Sel), "gs",     },
   /* [kIemNativeGstReg_Cr4] = */             { CPUMCTX_OFF_AND_SIZE(cr4),           "cr4",    },
 + /* [kIemNativeGstReg_Xcr0] = */            { CPUMCTX_OFF_AND_SIZE(aXcr[0]),       "xcr0",   },
   /* [kIemNativeGstReg_EFlags] = */          { CPUMCTX_OFF_AND_SIZE(eflags),        "eflags", },
   #undef CPUMCTX_OFF_AND_SIZE

Hunk 2 (around line 7006) — append a new native emitter for IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT after the existing SSE variant:

       iemNativeRegFreeTmp(pReNative, idxCr0Reg);
       iemNativeRegFreeTmp(pReNative, idxCr4Reg);
 +
 +     return off;
 + }
 +
 +
 + #define IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() \
 +     off = iemNativeEmitMaybeRaiseAvxRelatedXcpt(pReNative, off, pCallEntry->idxInstr)
 +
 + /**
 +  * Emits code to check if a AVX exception (either \#UD or \#NM) should be raised.
 +  *
 +  * @returns New code buffer offset, UINT32_MAX on failure.
 +  * @param   pReNative   The native recompile state.
 +  * @param   off         The code buffer offset.
 +  * @param   idxInstr    The current instruction.
 +  */
 + DECL_INLINE_THROW(uint32_t)
 + iemNativeEmitMaybeRaiseAvxRelatedXcpt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
 + {
 +     /*
 +      * Make sure we don't have any outstanding guest register writes as we may
 +      * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
 +      *
 +      * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
 +      */
 +     off = iemNativeRegFlushPendingWrites(pReNative, off);
 +
 + #ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
 +     off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
 + #else
 +     RT_NOREF(idxInstr);
 + #endif
 +
 +     /* Allocate a temporary CR0, CR4 and XCR0 register. */
 +     uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0,  kIemNativeGstRegUse_ReadOnly);
 +     uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4,  kIemNativeGstRegUse_ReadOnly);
 +     uint8_t const idxXcr0Reg      = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Xcr0, kIemNativeGstRegUse_ReadOnly);
 +     uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
 +     uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);
 +
 + #if 1
 +     off = iemNativeEmitBrk(pReNative, off, 0x4223); /** @todo Test this when AVX gets actually available. */
 + #endif
 +
 +     /** @todo r=aeichner Optimize this more later to have less compares and branches,
 +      * (see IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() in IEMMc.h but check that it has some
 +      * actual performance benefit first). */
 +     /*
 +      * if ((xcr0 & (XSAVE_C_YMM | XSAVE_C_SSE)) != (XSAVE_C_YMM | XSAVE_C_SSE))
 +      *     return raisexcpt();
 +      */
 +     const uint8_t idxRegTmp = iemNativeRegAllocTmpImm(pReNative, &off, XSAVE_C_YMM | XSAVE_C_SSE);
 +     off = iemNativeEmitOrGprByGpr(pReNative, off, idxRegTmp, idxXcr0Reg);
 +     off = iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, idxRegTmp, XSAVE_C_YMM | XSAVE_C_SSE, idxLabelRaiseUd);
 +     iemNativeRegFreeTmp(pReNative, idxRegTmp);
 +
 +     /*
 +      * if (!(cr4 & X86_CR4_OSXSAVE))
 +      *     return raisexcpt();
 +      */
 +     off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfNoneSet(pReNative, off, idxCr4Reg, X86_CR4_OSXSAVE, idxLabelRaiseUd);
 +     /*
 +      * if (cr0 & X86_CR0_TS)
 +      *     return raisexcpt();
 +      */
 +     off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(pReNative, off, idxCr0Reg, X86_CR0_TS, idxLabelRaiseNm);
 +
 +     /* Free but don't flush the CR0, CR4 and XCR0 register. */
 +     iemNativeRegFreeTmp(pReNative, idxCr0Reg);
 +     iemNativeRegFreeTmp(pReNative, idxCr4Reg);
 +     iemNativeRegFreeTmp(pReNative, idxXcr0Reg);

       return off;
-
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r103659 → r103660 (3 hunks)

Hunk 1 (IEMLIVENESSBIT bitfield, around line 416) — insert fXcr0 after fCr4; the EFLAGS bits shift up by one and the unused padding shrinks from 12 to 11 bits:

   uint64_t bmSegSel  : 6;  /**< 0x26 / 38: */
   uint64_t fCr4      : 1;  /**< 0x2c / 44: */
 + uint64_t fXcr0     : 1;  /**< 0x2d / 45: */
 - uint64_t fEflOther : 1;  /**< 0x2d / 45: Other EFLAGS bits (~X86_EFL_STATUS_BITS & X86_EFL_LIVE_MASK). First! */
 - uint64_t fEflCf    : 1;  /**< 0x2e / 46: Carry flag (X86_EFL_CF / 0). */
 - uint64_t fEflPf    : 1;  /**< 0x2f / 47: Parity flag (X86_EFL_PF / 2). */
 - uint64_t fEflAf    : 1;  /**< 0x20 / 48: Auxilary carry flag (X86_EFL_AF / 4). */
 - uint64_t fEflZf    : 1;  /**< 0x31 / 49: Zero flag (X86_EFL_ZF / 6). */
 - uint64_t fEflSf    : 1;  /**< 0x32 / 50: Signed flag (X86_EFL_SF / 7). */
 - uint64_t fEflOf    : 1;  /**< 0x33 / 51: Overflow flag (X86_EFL_OF / 12). */
 - uint64_t uUnused   : 12; /*   0x34 / 52 -> 0x40/64 */
 + uint64_t fEflOther : 1;  /**< 0x2e / 46: Other EFLAGS bits (~X86_EFL_STATUS_BITS & X86_EFL_LIVE_MASK). First! */
 + uint64_t fEflCf    : 1;  /**< 0x2f / 47: Carry flag (X86_EFL_CF / 0). */
 + uint64_t fEflPf    : 1;  /**< 0x30 / 48: Parity flag (X86_EFL_PF / 2). */
 + uint64_t fEflAf    : 1;  /**< 0x31 / 49: Auxilary carry flag (X86_EFL_AF / 4). */
 + uint64_t fEflZf    : 1;  /**< 0x32 / 50: Zero flag (X86_EFL_ZF / 6). */
 + uint64_t fEflSf    : 1;  /**< 0x33 / 51: Signed flag (X86_EFL_SF / 7). */
 + uint64_t fEflOf    : 1;  /**< 0x34 / 52: Overflow flag (X86_EFL_OF / 12). */
 + uint64_t uUnused   : 11; /*   0x35 / 53 -> 0x40/64 */
   };
   } IEMLIVENESSBIT;

Hunk 2 (around line 495) — update the EFLAGS mask for the shifted bit positions:

 - #define IEMLIVENESSBIT_ALL_EFL_MASK  UINT64_C(0x000fe00000000000)
 + #define IEMLIVENESSBIT_ALL_EFL_MASK  UINT64_C(0x001fc00000000000)

Hunk 3 (guest register enum, around line 714) — add the Xcr0 register between Cr4 and EFlags:

   kIemNativeGstReg_SegSelLast = kIemNativeGstReg_SegSelFirst + 5,
   kIemNativeGstReg_Cr4,
 + kIemNativeGstReg_Xcr0,
   kIemNativeGstReg_EFlags, /**< 32-bit, includes internal flags - last! */
   kIemNativeGstReg_End
Note:
See TracChangeset
for help on using the changeset viewer.

