Changeset 105732 in vbox
- Timestamp:
- Aug 19, 2024 5:01:37 PM (6 weeks ago)
- Location:
- trunk
- Files:
-
- 3 edited
-
include/VBox/vmm/cpum-armv8.h (modified) (1 diff)
-
include/iprt/armv8.h (modified) (1 diff)
-
src/VBox/VMM/VMMAll/CPUMAllRegs-armv8.cpp (modified) (1 diff)
Legend:
- Unmodified
- Added
- Removed
-
trunk/include/VBox/vmm/cpum-armv8.h
r101121 r105732 858 858 VMMDECL(bool) CPUMGetGuestIrqMasked(PVMCPUCC pVCpu); 859 859 VMMDECL(bool) CPUMGetGuestFiqMasked(PVMCPUCC pVCpu); 860 VMM_INT_DECL(uint8_t) CPUMGetGuestEL(PVMCPUCC pVCpu); 861 VMM_INT_DECL(bool) CPUMGetGuestMmuEnabled(PVMCPUCC pVCpu); 860 862 VMMDECL(VBOXSTRICTRC) CPUMQueryGuestSysReg(PVMCPUCC pVCpu, uint32_t idSysReg, uint64_t *puValue); 861 863 /** @} */ -
trunk/include/iprt/armv8.h
r105685 r105732 2666 2666 2667 2667 2668 2669 /** @name SCTLR_EL1 - AArch64 System Control Register (EL1). 2670 * @{ */ 2671 /** Bit 0 - MMU enable for EL1 and EL0 stage 1 address translation. */ 2672 #define ARMV8_SCTLR_EL1_M RT_BIT_64(0) 2673 /** Bit 1 - Alignment check enable for EL1 and EL0. */ 2674 #define ARMV8_SCTLR_EL1_A RT_BIT_64(1) 2675 /** Bit 2 - Stage 1 cacheability control, for data accesses. */ 2676 #define ARMV8_SCTLR_EL1_C RT_BIT_64(2) 2677 /** Bit 3 - SP alignment check enable. */ 2678 #define ARMV8_SCTLR_EL1_SA RT_BIT_64(3) 2679 /** Bit 4 - SP alignment check enable for EL0. */ 2680 #define ARMV8_SCTLR_EL1_SA0 RT_BIT_64(4) 2681 /** Bit 5 - System instruction memory barrier enable from AArch32 EL0. */ 2682 #define ARMV8_SCTLR_EL1_CP15BEN RT_BIT_64(5) 2683 /** Bit 6 - Non-aligned access enable. */ 2684 #define ARMV8_SCTLR_EL1_nAA RT_BIT_64(6) 2685 /** Bit 7 - IT disable, disables some uses of IT instructions at EL0 using AArch32. */ 2686 #define ARMV8_SCTLR_EL1_ITD RT_BIT_64(7) 2687 /** Bit 8 - SETEND instruction disable, disables SETEND instructions at EL0 using AArch32. */ 2688 #define ARMV8_SCTLR_EL1_SED RT_BIT_64(8) 2689 /** Bit 9 - User Mask Access. Traps EL0 execution of MSR and MRS instructions that access the PSTATE.{D,A,I,F} masks to EL1. */ 2690 #define ARMV8_SCTLR_EL1_UMA RT_BIT_64(9) 2691 /** Bit 10 - Enable EL0 access to the CFP*, DVP* and CPP* instructions if FEAT_SPECRES is supported. */ 2692 #define ARMV8_SCTLR_EL1_EnRCTX RT_BIT_64(10) 2693 /** Bit 11 - Exception Exit is Context Synchronizing (FEAT_ExS required). */ 2694 #define ARMV8_SCTLR_EL1_EOS RT_BIT_64(11) 2695 /** Bit 12 - Stage 1 instruction access cacheability control, for access at EL0 and EL1. */ 2696 #define ARMV8_SCTLR_EL1_I RT_BIT_64(12) 2697 /** @todo Finish (lazy developer). */ 2698 /** @} */ 2699 2700 2701 /** @name SCTLR_EL2 - AArch64 System Control Register (EL2) - 32-bit. 2702 * @{ */ 2703 /** Bit 0 - MMU enable for EL2. 
*/ 2704 #define ARMV8_SCTLR_EL2_M RT_BIT_64(0) 2705 /** Bit 1 - Alignment check enable. */ 2706 #define ARMV8_SCTLR_EL2_A RT_BIT_64(1) 2707 /** Bit 2 - Global enable for data and unified caches. */ 2708 #define ARMV8_SCTLR_EL2_C RT_BIT_64(2) 2709 /** Bit 3 - SP alignment check enable. */ 2710 #define ARMV8_SCTLR_EL2_SA RT_BIT_64(3) 2711 /* Bit 4 - 11 - Reserved. */ 2712 /** Bit 12 - Instruction cache enable. */ 2713 #define ARMV8_SCTLR_EL2_I RT_BIT_64(12) 2714 /* Bit 13 - 18 - Reserved. */ 2715 /** Bit 19 - Force treatment of all memory regions with write permissions as XN. */ 2716 #define ARMV8_SCTLR_EL2_WXN RT_BIT_64(19) 2717 /* Bit 20 - 24 - Reserved. */ 2718 /** Bit 25 - Exception endianness - set means big endian, clear little endian. */ 2719 #define ARMV8_SCTLR_EL2_EE RT_BIT_64(25) 2720 /* Bit 26 - 31 - Reserved. */ 2721 /** @} */ 2722 2723 2668 2724 #if (!defined(VBOX_FOR_DTRACE_LIB) && defined(__cplusplus) && !defined(ARMV8_WITHOUT_MK_INSTR)) || defined(DOXYGEN_RUNNING) 2669 2725 /** @defgroup grp_rt_armv8_mkinstr Instruction Encoding Helpers -
trunk/src/VBox/VMM/VMMAll/CPUMAllRegs-armv8.cpp
r101221 r105732 310 310 * Get the current exception level of the guest. 311 311 * 312 * @returns EL 313 * @param pVCpu The cross context virtual CPU structure of the calling EMT. 314 */ 315 VMMDECL(uint32_t) CPUMGetGuestEL(PVMCPU pVCpu) 316 { 317 RT_NOREF(pVCpu); 318 AssertReleaseFailed(); 319 return 0; 312 * @returns Exception Level 0 - 3 313 * @param pVCpu The cross context virtual CPU structure of the calling EMT. 314 */ 315 VMM_INT_DECL(uint8_t) CPUMGetGuestEL(PVMCPU pVCpu) 316 { 317 CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_PSTATE); 318 return ARMV8_SPSR_EL2_AARCH64_GET_EL(pVCpu->cpum.s.Guest.fPState); 319 } 320 321 322 /** 323 * Returns whether the guest has the MMU enabled for address translation. 324 * 325 * @returns true if address translation is enabled, false if not. 326 */ 327 VMM_INT_DECL(bool) CPUMGetGuestMmuEnabled(PVMCPUCC pVCpu) 328 { 329 CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_PSTATE | CPUMCTX_EXTRN_SCTLR_TCR_TTBR); 330 uint8_t bEl = ARMV8_SPSR_EL2_AARCH64_GET_EL(pVCpu->cpum.s.Guest.fPState); 331 if (bEl == ARMV8_AARCH64_EL_2) 332 { 333 CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_SYSREG_EL2); 334 return RT_BOOL(pVCpu->cpum.s.Guest.SctlrEl2.u64 & ARMV8_SCTLR_EL2_M); 335 } 336 337 Assert(bEl == ARMV8_AARCH64_EL_0 || bEl == ARMV8_AARCH64_EL_1); 338 return RT_BOOL(pVCpu->cpum.s.Guest.Sctlr.u64 & ARMV8_SCTLR_EL2_M); 320 339 } 321 340
Note:
See TracChangeset
for help on using the changeset viewer.

