VirtualBox

Changeset 104938 in vbox


Ignore:
Timestamp:
Jun 15, 2024 11:01:57 AM (3 months ago)
Author:
vboxsync
Message:

VMM/PGM: Eliminate the GstWalk parameter to PGM_GST_NAME(WalkFast), converting it into a local instead. bugref:10687

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/PGMAllGst.h

    r104937 r104938  
    578578 *                              This is ignored when @a a_fSetFlags is @c false.
    579579 * @param   pWalk               The page walk info.
    580  * @param   pGstWalk            The guest mode specific page walk info.
    581580 * @tparam  a_enmGuestSlatMode  The SLAT mode of the function.
    582581 * @tparam  a_fSetFlags         Whether to process @a fFlags and set accessed
     
    585584 */
    586585template<PGMSLAT const a_enmGuestSlatMode = PGMSLAT_DIRECT, bool const a_fSetFlags = false>
    587 DECLINLINE(int) PGM_GST_NAME(WalkFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk, PGSTPTWALK pGstWalk)
     586DECLINLINE(int) PGM_GST_NAME(WalkFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
    588587{
    589588    int rc;
     
    592591     * Init the walking structures.
    593592     */
    594     RT_ZERO(*pGstWalk);
    595593    pWalk->GCPtr        = GCPtr;
    596594    pWalk->GCPhys       = 0;
     
    611609# endif
    612610
     611    GSTPTWALK GstWalk = {0};
    613612    uint64_t fEffective;
    614613    {
     
    617616         * The PML4 table.
    618617         */
    619         rc = pgmGstGetLongModePML4PtrEx(pVCpu, &pGstWalk->pPml4);
     618        rc = pgmGstGetLongModePML4PtrEx(pVCpu, &GstWalk.pPml4);
    620619        if (RT_SUCCESS(rc)) { /* probable */ }
    621620        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 4, rc);
    622621
    623622        PX86PML4E pPml4e;
    624         pGstWalk->pPml4e  = pPml4e  = &pGstWalk->pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK];
     623        GstWalk.pPml4e  = pPml4e  = &GstWalk.pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK];
    625624        X86PML4E  Pml4e;
    626         pGstWalk->Pml4e.u = Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u);
     625        GstWalk.Pml4e.u = Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u);
    627626
    628627        if (GST_IS_PGENTRY_PRESENT(pVCpu, Pml4e)) { /* probable */ }
     
    642641        RTGCPHYS GCPhysPdpt = Pml4e.u & X86_PML4E_PG_MASK;
    643642        PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPdpt, false /*a_fFinal*/, GCPhysPdpt, pWalk);
    644         rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void **)&pGstWalk->pPdpt);
     643        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void **)&GstWalk.pPdpt);
    645644        if (RT_SUCCESS(rc)) { /* probable */ }
    646645        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 3, rc);
    647646
    648647# elif PGM_GST_TYPE == PGM_TYPE_PAE
    649         rc = pgmGstGetPaePDPTPtrEx(pVCpu, &pGstWalk->pPdpt);
     648        rc = pgmGstGetPaePDPTPtrEx(pVCpu, &GstWalk.pPdpt);
    650649        if (RT_SUCCESS(rc)) { /* probable */ }
    651650        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc);
     
    655654# if PGM_GST_TYPE == PGM_TYPE_AMD64 || PGM_GST_TYPE == PGM_TYPE_PAE
    656655        PX86PDPE pPdpe;
    657         pGstWalk->pPdpe  = pPdpe  = &pGstWalk->pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK];
     656        GstWalk.pPdpe  = pPdpe  = &GstWalk.pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK];
    658657        X86PDPE  Pdpe;
    659         pGstWalk->Pdpe.u = Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u);
     658        GstWalk.Pdpe.u = Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u);
    660659
    661660        if (GST_IS_PGENTRY_PRESENT(pVCpu, Pdpe)) { /* probable */ }
     
    686685        RTGCPHYS GCPhysPd = Pdpe.u & X86_PDPE_PG_MASK;
    687686        PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPd, false /*a_fFinal*/, GCPhysPd, pWalk);
    688         rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)&pGstWalk->pPd);
     687        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)&GstWalk.pPd);
    689688        if (RT_SUCCESS(rc)) { /* probable */ }
    690689        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 2, rc);
    691690
    692691# elif PGM_GST_TYPE == PGM_TYPE_32BIT
    693         rc = pgmGstGet32bitPDPtrEx(pVCpu, &pGstWalk->pPd);
     692        rc = pgmGstGet32bitPDPtrEx(pVCpu, &GstWalk.pPd);
    694693        if (RT_SUCCESS(rc)) { /* probable */ }
    695694        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc);
     
    698697    {
    699698        PGSTPDE pPde;
    700         pGstWalk->pPde  = pPde  = &pGstWalk->pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK];
     699        GstWalk.pPde  = pPde  = &GstWalk.pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK];
    701700        GSTPDE  Pde;
    702701# if PGM_GST_TYPE != PGM_TYPE_32BIT
    703         pGstWalk->Pde.u = Pde.u = ASMAtomicUoReadU64(&pPde->u);
     702        GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU64(&pPde->u);
    704703# else
    705         pGstWalk->Pde.u = Pde.u = ASMAtomicUoReadU32(&pPde->u);
     704        GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU32(&pPde->u);
    706705# endif
    707706        if (GST_IS_PGENTRY_PRESENT(pVCpu, Pde)) { /* probable */ }
     
    790789        RTGCPHYS GCPhysPt = GST_GET_PDE_GCPHYS(Pde);
    791790        PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPt, false /*a_fFinal*/, GCPhysPt, pWalk);
    792         rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&pGstWalk->pPt);
     791        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&GstWalk.pPt);
    793792        if (RT_SUCCESS(rc)) { /* probable */ }
    794793        else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 1, rc);
     
    796795    {
    797796        PGSTPTE pPte;
    798         pGstWalk->pPte  = pPte  = &pGstWalk->pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK];
     797        GstWalk.pPte  = pPte  = &GstWalk.pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK];
    799798        GSTPTE  Pte;
    800799# if PGM_GST_TYPE != PGM_TYPE_32BIT
    801         pGstWalk->Pte.u = Pte.u = ASMAtomicUoReadU64(&pPte->u);
     800        GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU64(&pPte->u);
    802801# else
    803         pGstWalk->Pte.u = Pte.u = ASMAtomicUoReadU32(&pPte->u);
     802        GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU32(&pPte->u);
    804803# endif
    805804
     
    932931   || PGM_GST_TYPE == PGM_TYPE_AMD64
    933932
    934     GSTPTWALK GstWalk;
    935933    int rc;
    936934# if defined(VBOX_WITH_NESTED_HWVIRT_VMX_EPT) || defined(VBOX_WITH_NESTED_HWVIRT_SVM_XXX)
     
    940938# endif
    941939            if (fFlags)
    942                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, true>(pVCpu, GCPtr, fFlags, pWalk, &GstWalk);
     940                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, true>(pVCpu, GCPtr, fFlags, pWalk);
    943941            else
    944                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, false>(pVCpu, GCPtr, 0, pWalk, &GstWalk);
     942                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, false>(pVCpu, GCPtr, 0, pWalk);
    945943# if defined(VBOX_WITH_NESTED_HWVIRT_VMX_EPT) || defined(VBOX_WITH_NESTED_HWVIRT_SVM_XXX)
    946944            break;
     
    948946        case PGMSLAT_EPT:
    949947            if (fFlags)
    950                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, true>(pVCpu, GCPtr, fFlags, pWalk, &GstWalk);
     948                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, true>(pVCpu, GCPtr, fFlags, pWalk);
    951949            else
    952                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, false>(pVCpu, GCPtr, 0, pWalk, &GstWalk);
     950                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, false>(pVCpu, GCPtr, 0, pWalk);
    953951            break;
    954952#  endif
     
    956954        case PGMSLAT_32BIT:
    957955            if (fFlags)
    958                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, true>(pVCpu, GCPtr, fFlags, pWalk, &GstWalk);
     956                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, true>(pVCpu, GCPtr, fFlags, pWalk);
    959957            else
    960                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, false>(pVCpu, GCPtr, 0, pWalk, &GstWalk);
     958                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, false>(pVCpu, GCPtr, 0, pWalk);
    961959            break;
    962960        case PGMSLAT_PAE:
    963961            if (fFlags)
    964                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, true>(pVCpu, GCPtr, fFlags, pWalk, &GstWalk);
     962                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, true>(pVCpu, GCPtr, fFlags, pWalk);
    965963            else
    966                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, false>(pVCpu, GCPtr, 0, pWalk, &GstWalk);
     964                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, false>(pVCpu, GCPtr, 0, pWalk);
    967965            break;
    968966        case PGMSLAT_AMD64:
    969967            if (fFlags)
    970                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, true>(pVCpu, GCPtr, fFlags, pWalk, &GstWalk);
     968                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, true>(pVCpu, GCPtr, fFlags, pWalk);
    971969            else
    972                 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, false>(pVCpu, GCPtr, 0, pWalk, &GstWalk);
     970                rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, false>(pVCpu, GCPtr, 0, pWalk);
    973971            break;
    974972#  endif
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette