VirtualBox

Changeset 72891 in vbox


Ignore:
Timestamp:
Jul 4, 2018 4:34:18 PM (6 years ago)
Author:
vboxsync
Message:

EM,HM,IEM: Replaced EMInterpretCpuid with IEMExecDecodedCpuid. Set HM_CHANGED_GUEST_XXX more consistently after using IEMExecDecodedXxxx in VT-x code.

Location:
trunk
Files:
7 edited

Legend:

Unmodified
Added
Removed
  • trunk/include/VBox/vmm/em.h

    r72882 r72891  
    316316VMM_INT_DECL(int)               EMInterpretIretV86ForPatm(PVM pVM, PVMCPU pVCpu, PCPUMCTXCORE pRegFrame);
    317317#endif
    318 VMM_INT_DECL(int)               EMInterpretCpuId(PVM pVM, PVMCPU pVCpu, PCPUMCTXCORE pRegFrame);
    319318VMM_INT_DECL(int)               EMInterpretRdpmc(PVM pVM, PVMCPU pVCpu, PCPUMCTXCORE pRegFrame);
    320319VMM_INT_DECL(VBOXSTRICTRC)      EMInterpretInvlpg(PVM pVM, PVMCPU pVCpu, PCPUMCTXCORE pRegFrame, RTGCPTR pAddrGC);
  • trunk/include/VBox/vmm/iem.h

    r72882 r72891  
    298298VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedInvlpg(PVMCPU pVCpu,  uint8_t cbInstr, RTGCPTR GCPtrPage);
    299299VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedInvpcid(PVMCPU pVCpu, uint8_t cbInstr, uint8_t uType, RTGCPTR GCPtrInvpcidDesc);
     300VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedCpuid(PVMCPU pVCpu, uint8_t cbInstr);
    300301VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedRdtsc(PVMCPU pVCpu, uint8_t cbInstr);
    301302VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedRdtscp(PVMCPU pVCpu, uint8_t cbInstr);
  • trunk/src/VBox/VMM/VMMAll/EMAll.cpp

    r72885 r72891  
    12871287
    12881288/**
    1289  * Interpret CPUID given the parameters in the CPU context.
    1290  *
    1291  * @returns VBox status code.
    1292  * @param   pVM         The cross context VM structure.
    1293  * @param   pVCpu       The cross context virtual CPU structure.
    1294  * @param   pRegFrame   The register frame.
    1295  *
    1296  */
    1297 VMM_INT_DECL(int) EMInterpretCpuId(PVM pVM, PVMCPU pVCpu, PCPUMCTXCORE pRegFrame)
    1298 {
    1299     Assert(pRegFrame == CPUMGetGuestCtxCore(pVCpu));
    1300     uint32_t iLeaf    = pRegFrame->eax;
    1301     uint32_t iSubLeaf = pRegFrame->ecx;
    1302     NOREF(pVM);
    1303 
    1304     /* cpuid clears the high dwords of the affected 64 bits registers. */
    1305     pRegFrame->rax = 0;
    1306     pRegFrame->rbx = 0;
    1307     pRegFrame->rcx = 0;
    1308     pRegFrame->rdx = 0;
    1309 
    1310     /* Note: operates the same in 64 and non-64 bits mode. */
    1311     CPUMGetGuestCpuId(pVCpu, iLeaf, iSubLeaf, &pRegFrame->eax, &pRegFrame->ebx, &pRegFrame->ecx, &pRegFrame->edx);
    1312     Log(("Emulate: CPUID %x/%x -> %08x %08x %08x %08x\n", iLeaf, iSubLeaf, pRegFrame->eax, pRegFrame->ebx, pRegFrame->ecx, pRegFrame->edx));
    1313     return VINF_SUCCESS;
    1314 }
    1315 
    1316 
    1317 /**
    13181289 * Interpret RDPMC.
    13191290 *
  • trunk/src/VBox/VMM/VMMAll/IEMAll.cpp

    r72882 r72891  
    1495114951
    1495214952
     14953
     14954/**
     14955 * Interface for HM and EM to emulate the CPUID instruction.
     14956 *
     14957 * @returns Strict VBox status code.
     14958 *
     14959 * @param   pVCpu               The cross context virtual CPU structure.
     14960 * @param   cbInstr             The instruction length in bytes.
     14961 *
     14962 * @remarks Not all of the state needs to be synced in, the usual plus RAX and RCX.
     14963 */
     14964VMM_INT_DECL(VBOXSTRICTRC)  IEMExecDecodedCpuid(PVMCPU pVCpu, uint8_t cbInstr)
     14965{
     14966    IEMEXEC_ASSERT_INSTR_LEN_RETURN(cbInstr, 2);
     14967    IEM_CTX_ASSERT(pVCpu, IEM_CPUMCTX_EXTRN_EXEC_DECODED_NO_MEM_MASK | CPUMCTX_EXTRN_RAX | CPUMCTX_EXTRN_RCX);
     14968
     14969    iemInitExec(pVCpu, false /*fBypassHandlers*/);
     14970    VBOXSTRICTRC rcStrict = IEM_CIMPL_CALL_0(iemCImpl_cpuid);
     14971    Assert(!pVCpu->iem.s.cActiveMappings);
     14972    return iemUninitExecAndFiddleStatusAndMaybeReenter(pVCpu, rcStrict);
     14973}
     14974
     14975
     14976
    1495314977/**
    1495414978 * Interface for HM and EM to emulate the RDTSC instruction.
  • trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp.h

    r72882 r72891  
    68086808    /** @todo make CPUMGetGuestCpuId import any necessary MSR state. */
    68096809    IEM_CTX_IMPORT_RET(pVCpu, CPUMCTX_EXTRN_ALL_MSRS);
    6810     CPUMGetGuestCpuId(pVCpu, pVCpu->cpum.GstCtx.eax, pVCpu->cpum.GstCtx.ecx, &pVCpu->cpum.GstCtx.eax, &pVCpu->cpum.GstCtx.ebx, &pVCpu->cpum.GstCtx.ecx, &pVCpu->cpum.GstCtx.edx);
     6810    CPUMGetGuestCpuId(pVCpu, pVCpu->cpum.GstCtx.eax, pVCpu->cpum.GstCtx.ecx,
     6811                      &pVCpu->cpum.GstCtx.eax, &pVCpu->cpum.GstCtx.ebx, &pVCpu->cpum.GstCtx.ecx, &pVCpu->cpum.GstCtx.edx);
    68116812    pVCpu->cpum.GstCtx.rax &= UINT32_C(0xffffffff);
    68126813    pVCpu->cpum.GstCtx.rbx &= UINT32_C(0xffffffff);
    68136814    pVCpu->cpum.GstCtx.rcx &= UINT32_C(0xffffffff);
    68146815    pVCpu->cpum.GstCtx.rdx &= UINT32_C(0xffffffff);
     6816    pVCpu->cpum.GstCtx.fExtrn &= ~(CPUMCTX_EXTRN_RAX | CPUMCTX_EXTRN_RCX | CPUMCTX_EXTRN_RDX | CPUMCTX_EXTRN_RBX);
    68156817
    68166818    iemRegAddToRipAndClearRF(pVCpu, cbInstr);
  • trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp

    r72886 r72891  
    61466146
    61476147            default:
    6148             {
    6149                 AssertMsgFailed(("hmR0SvmExitCpuid: EMInterpretCpuId failed with %Rrc\n", rc));
    6150                 rc = VERR_SVM_IPE_2;
    6151                 break;
    6152             }
     6148                AssertMsgFailedBreakStmt(("Bogus enmRaise value: %d (%#x)\n", enmRaise, enmRaise), rc = VERR_SVM_IPE_2);
    61536149        }
    61546150    }
     
    62996295    HMSVM_VALIDATE_EXIT_HANDLER_PARAMS();
    63006296
    6301     HMSVM_CPUMCTX_IMPORT_STATE(pVCpu,   CPUMCTX_EXTRN_RIP
    6302                                       | CPUMCTX_EXTRN_CS);
     6297    HMSVM_CPUMCTX_IMPORT_STATE(pVCpu, IEM_CPUMCTX_EXTRN_EXEC_DECODED_NO_MEM_MASK | CPUMCTX_EXTRN_RAX | CPUMCTX_EXTRN_RCX);
    63036298    VBOXSTRICTRC rcStrict;
    63046299    PCEMEXITREC pExitRec = EMHistoryUpdateFlagsAndTypeAndPC(pVCpu,
     
    63076302    if (!pExitRec)
    63086303    {
    6309         PVM pVM = pVCpu->CTX_SUFF(pVM);
    6310         rcStrict = EMInterpretCpuId(pVM, pVCpu, CPUMCTX2CORE(pCtx));
    6311         if (RT_LIKELY(rcStrict == VINF_SUCCESS))
    6312         {
    6313             hmR0SvmAdvanceRipHwAssist(pVCpu, pCtx, 2);
    6314             HMSVM_CHECK_SINGLE_STEP(pVCpu, rcStrict);
    6315         }
    6316         else
    6317         {
    6318             AssertMsgFailed(("hmR0SvmExitCpuid: EMInterpretCpuId failed with %Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
    6319             rcStrict = VERR_EM_INTERPRETER;
    6320         }
     6304        rcStrict = IEMExecDecodedCpuid(pVCpu, hmR0SvmGetInstrLengthHwAssist(pVCpu, pCtx, 2));
     6305        if (rcStrict == VINF_IEM_RAISED_XCPT)
     6306            rcStrict = VINF_SUCCESS;
     6307        HMSVM_CHECK_SINGLE_STEP(pVCpu, rcStrict);
    63216308    }
    63226309    else
  • trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp

    r72886 r72891  
    1124311243     */
    1124411244    int rc = hmR0VmxReadExitInstrLenVmcs(pVmxTransient);
    11245     rc    |= hmR0VmxImportGuestState(pVCpu,   CPUMCTX_EXTRN_RIP
    11246                                             | CPUMCTX_EXTRN_CS);
     11245    rc    |= hmR0VmxImportGuestState(pVCpu, IEM_CPUMCTX_EXTRN_EXEC_DECODED_NO_MEM_MASK | CPUMCTX_EXTRN_RAX | CPUMCTX_EXTRN_RCX);
    1124711246    AssertRCReturn(rc, rc);
    1124811247
     
    1125611255         * Regular CPUID instruction execution.
    1125711256         */
    11258         PVM pVM = pVCpu->CTX_SUFF(pVM);
    11259         rcStrict = EMInterpretCpuId(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx));
    11260         if (RT_LIKELY(rcStrict == VINF_SUCCESS))
    11261         {
    11262             rcStrict = hmR0VmxAdvanceGuestRip(pVCpu, pMixedCtx, pVmxTransient);
    11263             Assert(pVmxTransient->cbInstr == 2);
    11264         }
    11265         else
    11266         {
    11267             AssertMsgFailed(("hmR0VmxExitCpuid: EMInterpretCpuId failed with %Rrc\n", rc));
    11268             rcStrict = VERR_EM_INTERPRETER;
     11257        rcStrict = IEMExecDecodedCpuid(pVCpu, pVmxTransient->cbInstr);
     11258        if (rcStrict == VINF_SUCCESS)
     11259            ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_RAX
     11260                                                     | HM_CHANGED_GUEST_RCX | HM_CHANGED_GUEST_RDX    | HM_CHANGED_GUEST_RBX);
     11261        else if (rcStrict == VINF_IEM_RAISED_XCPT)
     11262        {
     11263            ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
     11264            rcStrict = VINF_SUCCESS;
    1126911265        }
    1127011266    }
     
    1128811284              VBOXSTRICTRC_VAL(rcStrict), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip));
    1128911285    }
    11290     return VBOXSTRICTRC_TODO(rcStrict);
     11286    return rcStrict;
    1129111287}
    1129211288
     
    1132611322        if (pVCpu->hm.s.vmx.u32ProcCtls & VMX_VMCS_CTRL_PROC_EXEC_USE_TSC_OFFSETTING)
    1132711323            pVmxTransient->fUpdateTscOffsettingAndPreemptTimer = true;
    11328         ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,   HM_CHANGED_GUEST_RIP
    11329                                                    | HM_CHANGED_GUEST_RFLAGS);
     11324        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS
     11325                                                 | HM_CHANGED_GUEST_RAX | HM_CHANGED_GUEST_RDX);
    1133011326    }
    1133111327    else if (rcStrict == VINF_IEM_RAISED_XCPT)
    1133211328    {
     11329        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
    1133311330        rcStrict = VINF_SUCCESS;
    11334         ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
    1133511331    }
    1133611332    return rcStrict;
     
    1135611352        if (pVCpu->hm.s.vmx.u32ProcCtls & VMX_VMCS_CTRL_PROC_EXEC_USE_TSC_OFFSETTING)
    1135711353            pVmxTransient->fUpdateTscOffsettingAndPreemptTimer = true;
    11358         ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,   HM_CHANGED_GUEST_RIP
    11359                                                    | HM_CHANGED_GUEST_RFLAGS);
     11354        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS
     11355                                                 | HM_CHANGED_GUEST_RAX | HM_CHANGED_GUEST_RDX | HM_CHANGED_GUEST_RCX);
    1136011356    }
    1136111357    else if (rcStrict == VINF_IEM_RAISED_XCPT)
    1136211358    {
     11359        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
    1136311360        rcStrict = VINF_SUCCESS;
    11364         ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
    1136511361    }
    1136611362    return rcStrict;
     
    1187711873    VBOXSTRICTRC rcStrict = IEMExecDecodedRdmsr(pVCpu, pVmxTransient->cbInstr);
    1187811874    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitRdmsr);
    11879     AssertMsg(   rcStrict == VINF_SUCCESS
    11880               || rcStrict == VINF_CPUM_R3_MSR_READ
    11881               || rcStrict == VINF_IEM_RAISED_XCPT,
    11882               ("Unexpected IEMExecDecodedRdmsr status: %Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
     11875    if (rcStrict == VINF_SUCCESS)
     11876        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS
     11877                                                 | HM_CHANGED_GUEST_RAX | HM_CHANGED_GUEST_RDX);
     11878    else if (rcStrict == VINF_IEM_RAISED_XCPT)
     11879    {
     11880        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
     11881        rcStrict = VINF_SUCCESS;
     11882    }
     11883    else
     11884        AssertMsg(rcStrict == VINF_IEM_RAISED_XCPT, ("Unexpected IEMExecDecodedRdmsr status: %Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
    1188311885
    1188411886    return rcStrict;
     
    1190911911    if (rcStrict == VINF_SUCCESS)
    1191011912    {
     11913        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS);
     11914
    1191111915        /* If this is an X2APIC WRMSR access, update the APIC state as well. */
    1191211916        if (    idMsr == MSR_IA32_APICBASE
     
    1200312007#endif  /* VBOX_STRICT */
    1200412008    }
     12009    else if (rcStrict == VINF_IEM_RAISED_XCPT)
     12010    {
     12011        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
     12012        rcStrict = VINF_SUCCESS;
     12013    }
    1200512014    else
    12006         AssertMsg(   rcStrict == VINF_CPUM_R3_MSR_WRITE
    12007                   || rcStrict == VINF_IEM_RAISED_XCPT,
    12008                   ("Unexpected IEMExecDecodedWrmsr status: %Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
     12015        AssertMsg(rcStrict == VINF_IEM_RAISED_XCPT, ("Unexpected IEMExecDecodedWrmsr status: %Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
    1200912016
    1201012017    return rcStrict;
     
    1208212089                case 0:
    1208312090                {
    12084                     ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_CR0);
     12091                    ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,
     12092                                     HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_CR0);
    1208512093                    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCR0Write);
    1208612094                    Log4(("CRX CR0 write rcStrict=%Rrc CR0=%#RX64\n", VBOXSTRICTRC_VAL(rcStrict), pMixedCtx->cr0));
     
    1209912107                    Assert(!pVM->hm.s.fNestedPaging || !CPUMIsGuestPagingEnabledEx(pMixedCtx) || pVCpu->hm.s.fUsingDebugLoop);
    1210012108                    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCR3Write);
    12101                     ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_CR3);
     12109                    ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,
     12110                                     HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_CR3);
    1210212111                    Log4(("CRX CR3 write rcStrict=%Rrc CR3=%#RX64\n", VBOXSTRICTRC_VAL(rcStrict), pMixedCtx->cr3));
    1210312112                    break;
     
    1210712116                {
    1210812117                    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCR4Write);
    12109                     ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_CR4);
     12118                    ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,
     12119                                     HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_CR4);
    1211012120                    Log4(("CRX CR4 write rc=%Rrc CR4=%#RX64 fLoadSaveGuestXcr0=%u\n", VBOXSTRICTRC_VAL(rcStrict),
    1211112121                          pMixedCtx->cr4, pVCpu->hm.s.fLoadSaveGuestXcr0));
     
    1211712127                    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCR8Write);
    1211812128                    Assert(!(pVCpu->hm.s.vmx.u32ProcCtls & VMX_VMCS_CTRL_PROC_EXEC_USE_TPR_SHADOW));
    12119                     ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_APIC_TPR);
     12129                    ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged,
     12130                                     HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_APIC_TPR);
    1212012131                    break;
    1212112132                }
     
    1215512166                  VBOXSTRICTRC_VAL(rcStrict)));
    1215612167            if (VMX_EXIT_QUAL_CRX_GENREG(uExitQualification) == X86_GREG_xSP)
    12157                 ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RSP);
     12168                ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_RSP);
     12169            else
     12170                ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS);
    1215812171            break;
    1215912172        }
     
    1216512178                      || rcStrict == VINF_IEM_RAISED_XCPT, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
    1216612179
    12167             ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_CR0);
     12180            ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_CR0);
    1216812181            STAM_COUNTER_INC(&pVCpu->hm.s.StatExitClts);
    1216912182            Log4(("CRX CLTS rcStrict=%d\n", VBOXSTRICTRC_VAL(rcStrict)));
     
    1218012193                      ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
    1218112194
    12182             ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_CR0);
     12195            ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS | HM_CHANGED_GUEST_CR0);
    1218312196            STAM_COUNTER_INC(&pVCpu->hm.s.StatExitLmsw);
    1218412197            Log4(("CRX LMSW rcStrict=%d\n", VBOXSTRICTRC_VAL(rcStrict)));
     
    1219112204    }
    1219212205
    12193     ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, rcStrict != VINF_IEM_RAISED_XCPT ? HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS
    12194                                                                                 : HM_CHANGED_XCPT_RAISED_MASK);
     12206    Assert(   (pVCpu->hm.s.fCtxChanged & (HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS))
     12207           == (HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS));
     12208    if (rcStrict == VINF_IEM_RAISED_XCPT)
     12209    {
     12210        ASMAtomicUoOrU64(&pVCpu->hm.s.fCtxChanged, HM_CHANGED_XCPT_RAISED_MASK);
     12211        rcStrict = VINF_SUCCESS;
     12212    }
     12213
    1219512214    STAM_PROFILE_ADV_STOP(&pVCpu->hm.s.StatExitMovCRx, y2);
    1219612215    NOREF(pVM);
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette