Changeset 104938 in vbox
- Timestamp: Jun 15, 2024 11:01:57 AM (3 months ago)
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/PGMAllGst.h (modified) (15 diffs)
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/PGMAllGst.h
r104937 r104938 578 578 * This is ignored when @a a_fSetFlags is @c false. 579 579 * @param pWalk The page walk info. 580 * @param pGstWalk The guest mode specific page walk info.581 580 * @tparam a_enmGuestSlatMode The SLAT mode of the function. 582 581 * @tparam a_fSetFlags Whether to process @a fFlags and set accessed … … 585 584 */ 586 585 template<PGMSLAT const a_enmGuestSlatMode = PGMSLAT_DIRECT, bool const a_fSetFlags = false> 587 DECLINLINE(int) PGM_GST_NAME(WalkFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk , PGSTPTWALK pGstWalk)586 DECLINLINE(int) PGM_GST_NAME(WalkFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk) 588 587 { 589 588 int rc; … … 592 591 * Init the walking structures. 593 592 */ 594 RT_ZERO(*pGstWalk);595 593 pWalk->GCPtr = GCPtr; 596 594 pWalk->GCPhys = 0; … … 611 609 # endif 612 610 611 GSTPTWALK GstWalk = {0}; 613 612 uint64_t fEffective; 614 613 { … … 617 616 * The PML4 table. 618 617 */ 619 rc = pgmGstGetLongModePML4PtrEx(pVCpu, & pGstWalk->pPml4);618 rc = pgmGstGetLongModePML4PtrEx(pVCpu, &GstWalk.pPml4); 620 619 if (RT_SUCCESS(rc)) { /* probable */ } 621 620 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 4, rc); 622 621 623 622 PX86PML4E pPml4e; 624 pGstWalk->pPml4e = pPml4e = &pGstWalk->pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK];623 GstWalk.pPml4e = pPml4e = &GstWalk.pPml4->a[(GCPtr >> X86_PML4_SHIFT) & X86_PML4_MASK]; 625 624 X86PML4E Pml4e; 626 pGstWalk->Pml4e.u = Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u);625 GstWalk.Pml4e.u = Pml4e.u = ASMAtomicUoReadU64(&pPml4e->u); 627 626 628 627 if (GST_IS_PGENTRY_PRESENT(pVCpu, Pml4e)) { /* probable */ } … … 642 641 RTGCPHYS GCPhysPdpt = Pml4e.u & X86_PML4E_PG_MASK; 643 642 PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPdpt, false /*a_fFinal*/, GCPhysPdpt, pWalk); 644 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void **)& pGstWalk->pPdpt);643 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPdpt, (void 
**)&GstWalk.pPdpt); 645 644 if (RT_SUCCESS(rc)) { /* probable */ } 646 645 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 3, rc); 647 646 648 647 # elif PGM_GST_TYPE == PGM_TYPE_PAE 649 rc = pgmGstGetPaePDPTPtrEx(pVCpu, & pGstWalk->pPdpt);648 rc = pgmGstGetPaePDPTPtrEx(pVCpu, &GstWalk.pPdpt); 650 649 if (RT_SUCCESS(rc)) { /* probable */ } 651 650 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc); … … 655 654 # if PGM_GST_TYPE == PGM_TYPE_AMD64 || PGM_GST_TYPE == PGM_TYPE_PAE 656 655 PX86PDPE pPdpe; 657 pGstWalk->pPdpe = pPdpe = &pGstWalk->pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK];656 GstWalk.pPdpe = pPdpe = &GstWalk.pPdpt->a[(GCPtr >> GST_PDPT_SHIFT) & GST_PDPT_MASK]; 658 657 X86PDPE Pdpe; 659 pGstWalk->Pdpe.u = Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u);658 GstWalk.Pdpe.u = Pdpe.u = ASMAtomicUoReadU64(&pPdpe->u); 660 659 661 660 if (GST_IS_PGENTRY_PRESENT(pVCpu, Pdpe)) { /* probable */ } … … 686 685 RTGCPHYS GCPhysPd = Pdpe.u & X86_PDPE_PG_MASK; 687 686 PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPd, false /*a_fFinal*/, GCPhysPd, pWalk); 688 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)& pGstWalk->pPd);687 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPd, (void **)&GstWalk.pPd); 689 688 if (RT_SUCCESS(rc)) { /* probable */ } 690 689 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 2, rc); 691 690 692 691 # elif PGM_GST_TYPE == PGM_TYPE_32BIT 693 rc = pgmGstGet32bitPDPtrEx(pVCpu, & pGstWalk->pPd);692 rc = pgmGstGet32bitPDPtrEx(pVCpu, &GstWalk.pPd); 694 693 if (RT_SUCCESS(rc)) { /* probable */ } 695 694 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 8, rc); … … 698 697 { 699 698 PGSTPDE pPde; 700 pGstWalk->pPde = pPde = &pGstWalk->pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK];699 GstWalk.pPde = pPde = &GstWalk.pPd->a[(GCPtr >> GST_PD_SHIFT) & GST_PD_MASK]; 701 700 GSTPDE Pde; 702 701 # if PGM_GST_TYPE != PGM_TYPE_32BIT 703 pGstWalk->Pde.u = Pde.u = 
ASMAtomicUoReadU64(&pPde->u);702 GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU64(&pPde->u); 704 703 # else 705 pGstWalk->Pde.u = Pde.u = ASMAtomicUoReadU32(&pPde->u);704 GstWalk.Pde.u = Pde.u = ASMAtomicUoReadU32(&pPde->u); 706 705 # endif 707 706 if (GST_IS_PGENTRY_PRESENT(pVCpu, Pde)) { /* probable */ } … … 790 789 RTGCPHYS GCPhysPt = GST_GET_PDE_GCPHYS(Pde); 791 790 PGM_GST_SLAT_WALK_FAST(pVCpu, GCPtr, GCPhysPt, false /*a_fFinal*/, GCPhysPt, pWalk); 792 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)& pGstWalk->pPt);791 rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&GstWalk.pPt); 793 792 if (RT_SUCCESS(rc)) { /* probable */ } 794 793 else return PGM_GST_NAME(WalkFastReturnBadPhysAddr)(pVCpu, pWalk, 1, rc); … … 796 795 { 797 796 PGSTPTE pPte; 798 pGstWalk->pPte = pPte = &pGstWalk->pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK];797 GstWalk.pPte = pPte = &GstWalk.pPt->a[(GCPtr >> GST_PT_SHIFT) & GST_PT_MASK]; 799 798 GSTPTE Pte; 800 799 # if PGM_GST_TYPE != PGM_TYPE_32BIT 801 pGstWalk->Pte.u = Pte.u = ASMAtomicUoReadU64(&pPte->u);800 GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU64(&pPte->u); 802 801 # else 803 pGstWalk->Pte.u = Pte.u = ASMAtomicUoReadU32(&pPte->u);802 GstWalk.Pte.u = Pte.u = ASMAtomicUoReadU32(&pPte->u); 804 803 # endif 805 804 … … 932 931 || PGM_GST_TYPE == PGM_TYPE_AMD64 933 932 934 GSTPTWALK GstWalk;935 933 int rc; 936 934 # if defined(VBOX_WITH_NESTED_HWVIRT_VMX_EPT) || defined(VBOX_WITH_NESTED_HWVIRT_SVM_XXX) … … 940 938 # endif 941 939 if (fFlags) 942 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, true>(pVCpu, GCPtr, fFlags, pWalk , &GstWalk);940 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, true>(pVCpu, GCPtr, fFlags, pWalk); 943 941 else 944 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, false>(pVCpu, GCPtr, 0, pWalk , &GstWalk);942 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_DIRECT, false>(pVCpu, GCPtr, 0, pWalk); 945 943 # if defined(VBOX_WITH_NESTED_HWVIRT_VMX_EPT) || defined(VBOX_WITH_NESTED_HWVIRT_SVM_XXX) 946 944 break; … … 948 946 case 
PGMSLAT_EPT: 949 947 if (fFlags) 950 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, true>(pVCpu, GCPtr, fFlags, pWalk , &GstWalk);948 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, true>(pVCpu, GCPtr, fFlags, pWalk); 951 949 else 952 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, false>(pVCpu, GCPtr, 0, pWalk , &GstWalk);950 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_EPT, false>(pVCpu, GCPtr, 0, pWalk); 953 951 break; 954 952 # endif … … 956 954 case PGMSLAT_32BIT: 957 955 if (fFlags) 958 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, true>(pVCpu, GCPtr, fFlags, pWalk , &GstWalk);956 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, true>(pVCpu, GCPtr, fFlags, pWalk); 959 957 else 960 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, false>(pVCpu, GCPtr, 0, pWalk , &GstWalk);958 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_32BIT, false>(pVCpu, GCPtr, 0, pWalk); 961 959 break; 962 960 case PGMSLAT_PAE: 963 961 if (fFlags) 964 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, true>(pVCpu, GCPtr, fFlags, pWalk , &GstWalk);962 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, true>(pVCpu, GCPtr, fFlags, pWalk); 965 963 else 966 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, false>(pVCpu, GCPtr, 0, pWalk , &GstWalk);964 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_PAE, false>(pVCpu, GCPtr, 0, pWalk); 967 965 break; 968 966 case PGMSLAT_AMD64: 969 967 if (fFlags) 970 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, true>(pVCpu, GCPtr, fFlags, pWalk , &GstWalk);968 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, true>(pVCpu, GCPtr, fFlags, pWalk); 971 969 else 972 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, false>(pVCpu, GCPtr, 0, pWalk , &GstWalk);970 rc = PGM_GST_NAME(WalkFast)<PGMSLAT_AMD64, false>(pVCpu, GCPtr, 0, pWalk); 973 971 break; 974 972 # endif
Note: See TracChangeset for help on using the changeset viewer.

