VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h

Last change on this file was 109064, checked in by vboxsync, 9 days ago

VMM/PGMAllGst-armv8.cpp.h: Set fSucceeded correctly on error, bugref:10388

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 45.5 KB
Line 
1/* $Id: PGMAllGst-armv8.cpp.h 109064 2025-04-24 07:20:21Z vboxsync $ */
2/** @file
3 * PGM - Page Manager, ARMv8 Guest Paging Template - All context code.
4 */
5
6/*
7 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*
30 *
31 * Mode criteria:
32 * - MMU enabled/disabled.
33 * - TCR_EL1.TG0 (granule size for TTBR0_EL1).
34 * - TCR_EL1.TG1 (granule size for TTBR1_EL1).
35 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1).
36 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1).
37 * - TCR_EL1.IPS (intermediate physical address size).
38 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1).
39 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1).
40 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1).
41 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1).
42 * - More ?
43 *
44 * Other relevant modifiers:
45 * - TCR_EL1.HA - hardware access bit.
46 * - TCR_EL1.HD - hardware dirty bit.
47 * - ++
48 *
49 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers,
50 * so they should all have their own separate modes. To make it simpler,
51 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level
52 * functions determine which of the roots to use and call template (C++)
53 * functions that takes it from there. Using the preprocessor function template
54 * approach is _not_ desirable here.
55 *
56 */
57
58
59/*
60 * Common helpers.
61 * Common helpers.
62 * Common helpers.
63 */
64
65DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
66{
67 NOREF(pVCpu);
68 pWalk->fSucceeded = false;
69 pWalk->fNotPresent = true;
70 pWalk->uLevel = uLevel;
71 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT
72 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
73 return VERR_PAGE_TABLE_NOT_PRESENT;
74}
75
76DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
77{
78 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
79 pWalk->fSucceeded = false;
80 pWalk->fBadPhysAddr = true;
81 pWalk->uLevel = uLevel;
82 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
83 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
84 return VERR_PAGE_TABLE_NOT_PRESENT;
85}
86
87
88DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
89{
90 NOREF(pVCpu);
91 pWalk->fSucceeded = false;
92 pWalk->fRsvdError = true;
93 pWalk->uLevel = uLevel;
94 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS
95 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
96 return VERR_PAGE_TABLE_NOT_PRESENT;
97}
98
99
100DECLINLINE(int) pgmGstWalkFastReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
101{
102 RT_NOREF(pVCpu);
103 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
104 return VERR_PAGE_TABLE_NOT_PRESENT;
105}
106
107
108DECLINLINE(int) pgmGstWalkFastReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel, int rc)
109{
110 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); RT_NOREF(pVCpu, rc);
111 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
112 return VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS;
113}
114
115
116DECLINLINE(int) pgmGstWalkFastReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
117{
118 RT_NOREF(pVCpu);
119 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
120 return VERR_RESERVED_PAGE_TABLE_BITS;
121}
122
123
124/*
125 * Special no paging variant.
126 * Special no paging variant.
127 * Special no paging variant.
128 */
129
130static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
131{
132 RT_NOREF(pVCpu);
133
134 RT_ZERO(*pWalk);
135 pWalk->fSucceeded = true;
136 pWalk->GCPtr = GCPtr;
137 pWalk->GCPhys = GCPtr;
138 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
139 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
140 return VINF_SUCCESS;
141}
142
143
144static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
145{
146 RT_NOREF(pVCpu, fFlags);
147
148 pWalk->GCPtr = GCPtr;
149 pWalk->GCPhys = GCPtr;
150 pWalk->GCPhysNested = 0;
151 pWalk->fInfo = PGM_WALKINFO_SUCCEEDED;
152 pWalk->fFailed = PGM_WALKFAIL_SUCCESS;
153 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
154 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
155 return VINF_SUCCESS;
156}
157
158
159static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
160{
161 /* Ignore. */
162 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
163 return VINF_SUCCESS;
164}
165
166
167static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
168{
169 RT_NOREF(pVCpu, GCPtr, pWalk);
170 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
171 return VERR_PGM_NOT_USED_IN_MODE;
172}
173
174
175static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneEnter)(PVMCPUCC pVCpu)
176{
177 /* Nothing to do. */
178 RT_NOREF(pVCpu);
179 return VINF_SUCCESS;
180}
181
182
183static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneExit)(PVMCPUCC pVCpu)
184{
185 /* Nothing to do. */
186 RT_NOREF(pVCpu);
187 return VINF_SUCCESS;
188}
189
190
191/*
192 * Template variants for actual paging modes.
193 * Template variants for actual paging modes.
194 * Template variants for actual paging modes.
195 */
196#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_MINUS_ONE 0
197#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO 1
198#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE 2
199#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO 3
200#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE 4
201#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID 5
202
203
204/*
205 * Descriptor flags to page table attribute flags mapping.
206 */
/**
 * Maps a 4-bit descriptor permission index - UXN:PXN:AP[2]:AP[1], built by
 * pgmGstWalkWorkerSetEffective() - to the effective PGM page table attributes
 * (privileged/unprivileged read, write and execute masks).
 */
static const PGMPTATTRS s_aEffective[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 0 0 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 0 1 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 0 1 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,

    /* 0 1 0 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 1 0 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 1 1 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UX_MASK,
    /* 0 1 1 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UX_MASK,

    /* 1 0 0 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK,
    /* 1 0 0 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK,
    /* 1 0 1 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK,
    /* 1 0 1 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK,

    /* 1 1 0 0 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK,
    /* 1 1 0 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK ,
    /* 1 1 1 0 */ PGM_PTATTRS_PR_MASK,
    /* 1 1 1 1 */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK,
};
230
231
232DECL_FORCE_INLINE(int) pgmGstWalkWorkerSetEffective(PPGMPTWALK pWalk, ARMV8VMSA64DESC Desc)
233{
234 uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
235 | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
236 | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;
237
238 pWalk->fEffective = s_aEffective[idxPerm];
239 return VINF_SUCCESS;
240}
241
242
/**
 * Common worker for walking the guest page tables with the VMSAv8-64
 * translation scheme (single stage).
 *
 * All the mode criteria - TTBR0 vs TTBR1, initial lookup level, granule size,
 * top-byte-ignore, EPD and 52-bit output addresses - are template parameters,
 * so the per-mode branches below are resolved at compile time.
 *
 * @returns VBox status code: VINF_SUCCESS on a successful translation, or
 *          VERR_PAGE_TABLE_NOT_PRESENT on any failure (see the
 *          pgmGstWalkReturn* helpers for the pWalk->fFailed details).
 * @param   pVCpu       The cross context virtual CPU structure.
 * @param   GCPtr       The guest virtual address to translate.
 * @param   pWalk       Where to store the walk result.
 * @param   pGstWalk    Extended walk data; currently unused (see todo below).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
DECL_FORCE_INLINE(int) pgmGstWalkWorker(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
{
    RT_NOREF(pGstWalk); /** @todo */

    /* This also applies to TG1 granule sizes, as both share the same encoding in TCR. */
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_INVALID == ARMV8_TCR_EL1_AARCH64_TG1_INVALID);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_16KB == ARMV8_TCR_EL1_AARCH64_TG1_16KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_4KB == ARMV8_TCR_EL1_AARCH64_TG1_4KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB == ARMV8_TCR_EL1_AARCH64_TG1_64KB);

    if RT_CONSTEXPR_IF(   a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID
                       && a_InitialLookupLvl != PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID)
    {
        /* Per-granule geometry, all selected at compile time below. */
        uint64_t fLookupMaskFull;      /* Table index mask for all levels after the initial one. */
        RTGCPTR offPageMask;           /* Byte offset mask within a level 3 page. */

        RTGCPTR offLvl1BlockMask;      /* Byte offset mask within a level 1 block; 0 when unsupported. */
        RTGCPTR offLvl2BlockMask;      /* Byte offset mask within a level 2 block. */

        uint64_t fNextTableOrPageMask; /* Extracts the next table/page physical address from a descriptor. */
        uint8_t cLvl0Shift;            /* VA shift for the level 0 table index (0 when no level 0). */
        uint8_t cLvl1Shift;
        uint8_t cLvl2Shift;
        uint8_t cLvl3Shift;

        RTGCPHYS fGCPhysLvl1BlockBase; /* Extracts the level 1 block base address. */
        RTGCPHYS fGCPhysLvl2BlockBase; /* Extracts the level 2 block base address. */

        /** @todo This needs to go into defines in armv8.h if final. */
        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
        {
            fLookupMaskFull = RT_BIT_64(9) - 1;
            offLvl1BlockMask = (RTGCPTR)(_1G - 1);
            offLvl2BlockMask = (RTGCPTR)(_2M - 1);
            offPageMask = (RTGCPTR)(_4K - 1);
            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
            cLvl0Shift = 39;
            cLvl1Shift = 30;
            cLvl2Shift = 21;
            cLvl3Shift = 12;
            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            fLookupMaskFull = RT_BIT_64(11) - 1;
            offLvl1BlockMask = 0; /** @todo TCR_EL1.DS support. */
            offLvl2BlockMask = (RTGCPTR)(_32M - 1);
            offPageMask = (RTGCPTR)(_16K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
            cLvl0Shift = 47;
            cLvl1Shift = 36;
            cLvl2Shift = 25;
            cLvl3Shift = 14;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            Assert(a_InitialLookupLvl > 0);

            fLookupMaskFull = RT_BIT_64(13) - 1;
            offLvl1BlockMask = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
            offLvl2BlockMask = (RTGCPTR)(_512M - 1);
            offPageMask = (RTGCPTR)(_64K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
            cLvl0Shift = 0; /* No Level 0 with 64KiB granules. */
            cLvl1Shift = 42;
            cLvl2Shift = 29;
            cLvl3Shift = 16;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
        }

        pWalk->GCPtr = GCPtr;

        /* Get the initial lookup mask. */
        uint8_t const bEl = CPUMGetGuestEL(pVCpu);
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        PARMV8VMSA64DESC paDesc = NULL;
        ARMV8VMSA64DESC Desc;
        int rc;
        /* NOTE(review): uses '==' while the later levels use '<='; an initial
           lookup level of MINUS_ONE would skip level 0 - presumably level -1
           (52-bit IA) is not supported yet, confirm before relying on it. */
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO)
        {
            Assert(cLvl0Shift != 0);
            uint8_t const uLvl = 0;

            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            /* Level 0 descriptors must be table descriptors. */
            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE)
        {
            uint8_t const uLvl = 1;

            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Level 1 block descriptors are only valid for granules that support them. */
                if (offLvl1BlockMask != 0)
                {
                    /* Block descriptor. */
                    pWalk->fSucceeded = true;
                    pWalk->fGigantPage = true;
                    pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
                    return pgmGstWalkWorkerSetEffective(pWalk, Desc);
                }
                else
                    return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO)
        {
            uint8_t const uLvl = 2;

            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor. */
                pWalk->fSucceeded = true;
                pWalk->fBigPage = true;
                pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
                return pgmGstWalkWorkerSetEffective(pWalk, Desc);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        AssertCompile(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE);
        uint8_t const uLvl = 3;

        /* Next level. */
        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */

        pWalk->fSucceeded = true;
        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
        return pgmGstWalkWorkerSetEffective(pWalk, Desc);
    }
    else
        AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
}
434
435
436template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
437static PGM_CTX_DECL(int) PGM_CTX(pgm,GstGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
438{
439 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>(pVCpu, GCPtr, pWalk, NULL /*pGstWalk*/);
440}
441
442
/**
 * Walk failure flags for privileged (EL1) read accesses, indexed by the
 * descriptor's UXN:PXN:AP[2]:AP[1] bits. Privileged reads always succeed
 * regardless of the permission bits.
 */
static const PGMWALKFAIL g_aPermPrivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 1 */ PGM_WALKFAIL_SUCCESS
};
463
464
/**
 * Walk failure flags for privileged (EL1) write accesses, indexed by the
 * descriptor's UXN:PXN:AP[2]:AP[1] bits. Writes fail whenever AP[2] is set
 * (read-only); the execute-never bits have no influence.
 */
static const PGMWALKFAIL g_aPermPrivWrite[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 0 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_WRITABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE
};
485
486
/**
 * Walk failure flags for privileged (EL1) execute accesses, indexed by the
 * descriptor's UXN:PXN:AP[2]:AP[1] bits. Execution fails whenever PXN is set;
 * UXN and the AP bits have no influence here.
 */
static const PGMWALKFAIL g_aPermPrivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 0 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 0 1 1 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 1 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_EXECUTABLE
};
507
508
/**
 * Walk failure flags for unprivileged (EL0) read accesses, indexed by the
 * descriptor's UXN:PXN:AP[2]:AP[1] bits. Reads fail whenever AP[1] is clear
 * (no EL0 access); the execute-never bits have no influence.
 */
static const PGMWALKFAIL g_aPermUnprivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /* 1 1 1 1 */ PGM_WALKFAIL_SUCCESS
};
529
530
531static const PGMWALKFAIL g_aPermUnprivWrite[] =
532{
533 /* UXN PXN AP[2] AP[1] */
534 /* 0 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
535 /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
536 /* 0 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
537 /* 0 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
538 /* 0 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
539 /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
540 /* 0 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
541 /* 0 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
542 /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
543 /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
544 /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
545 /* 1 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
546 /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
547 /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
548 /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
549 /* 1 1 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE
550};
551
552
/**
 * Walk failure flags for unprivileged (EL0) execute accesses, indexed by the
 * descriptor's UXN:PXN:AP[2]:AP[1] bits. Execution fails whenever UXN is set;
 * the AP bits don't factor in here (execute-only EL0 mappings exist in
 * VMSAv8-64 - NOTE(review): confirm WXN/AP interactions are intentionally
 * ignored at this layer).
 */
static const PGMWALKFAIL g_aPermUnprivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /* 0 0 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 0 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 0 */ PGM_WALKFAIL_SUCCESS,
    /* 0 1 1 1 */ PGM_WALKFAIL_SUCCESS,
    /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 0 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 0 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 0 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /* 1 1 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE
};
573
574
/**
 * Checks the access requested in @a fFlags against the permissions encoded in
 * the final page/block descriptor and updates the fast walk result.
 *
 * @returns VINF_SUCCESS if the access is permitted, VERR_ACCESS_DENIED if it
 *          violates the descriptor's permissions, VERR_PGM_MODE_IPE on an
 *          invalid flag combination.
 * @param   pWalk   The fast walk result; fEffective is always filled in,
 *                  fInfo/fFailed depending on the outcome.
 * @param   Desc    The final page or block descriptor.
 * @param   fFlags  PGMQPAGE_F_XXX access request flags.
 * @param   uLvl    The level the final descriptor was found at (for fFailed).
 */
DECL_FORCE_INLINE(int) pgmGstQueryPageCheckPermissions(PPGMPTWALKFAST pWalk, ARMV8VMSA64DESC Desc, uint32_t fFlags, uint8_t uLvl)
{
    Assert(!(fFlags & ~PGMQPAGE_F_VALID_MASK));

    /* Selects the permission table matching the requested access kind
       (indexed by the U/X/W/R request bits in fFlags). */
    static const uint32_t *s_apaPerm[] =
    {
        /* U X W R */
        /* 0 0 0 0 */ &g_aPermPrivRead[0], /* Don't check or modify anything, this translates to a privileged read */
        /* 0 0 0 1 */ &g_aPermPrivRead[0], /* Privileged read access */
        /* 0 0 1 0 */ &g_aPermPrivWrite[0], /* Privileged write access */
        /* 0 0 1 1 */ NULL, /* Invalid access flags */
        /* 0 1 0 0 */ &g_aPermPrivExec[0], /* Privileged execute access */
        /* 0 1 0 1 */ NULL, /* Invalid access flags */
        /* 0 1 1 0 */ NULL, /* Invalid access flags */
        /* 0 1 1 1 */ NULL, /* Invalid access flags */

        /* 1 0 0 0 */ NULL, /* Invalid access flags */
        /* 1 0 0 1 */ &g_aPermUnprivRead[0], /* Unprivileged read access */
        /* 1 0 1 0 */ &g_aPermUnprivWrite[0], /* Unprivileged write access */
        /* 1 0 1 1 */ NULL, /* Invalid access flags */
        /* 1 1 0 0 */ &g_aPermUnprivExec[0], /* Unprivileged execute access */
        /* 1 1 0 1 */ NULL, /* Invalid access flags */
        /* 1 1 1 0 */ NULL, /* Invalid access flags */
        /* 1 1 1 1 */ NULL, /* Invalid access flags */
    };
    Assert(fFlags < RT_ELEMENTS(s_apaPerm));

    const uint32_t *paPerm = s_apaPerm[fFlags];
    AssertReturn(paPerm, VERR_PGM_MODE_IPE);

    /* 4-bit UXN:PXN:AP[2]:AP[1] index into the selected permission table. */
    uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;

    /* Effective attributes are reported even when the access is denied. */
    pWalk->fEffective = s_aEffective[idxPerm];

    PGMWALKFAIL const fFailed = paPerm[idxPerm];
    if (fFailed == PGM_WALKFAIL_SUCCESS)
    {
        pWalk->fInfo |= PGM_WALKINFO_SUCCEEDED;
        return VINF_SUCCESS;
    }

    pWalk->fFailed = fFailed | (uLvl << PGM_WALKFAIL_LEVEL_SHIFT);
    return VERR_ACCESS_DENIED;
}
621
622
/**
 * Fast query of a guest page using the VMSAv8-64 translation scheme, checking
 * the requested access permissions along the way.
 *
 * Mirrors pgmGstWalkWorker() but uses lockless physical page access and the
 * fast walk result structure, and validates the access kind in @a fFlags
 * against the final descriptor via pgmGstQueryPageCheckPermissions().
 *
 * @returns VBox status code (VINF_SUCCESS, or one of the failure codes set by
 *          the pgmGstWalkFastReturn* helpers / VERR_ACCESS_DENIED).
 * @param   pVCpu   The cross context virtual CPU structure.
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  PGMQPAGE_F_XXX flags describing the requested access.
 * @param   pWalk   Where to store the fast walk result.
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    /* This also applies to TG1 granule sizes, as both share the same encoding in TCR. */
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_INVALID == ARMV8_TCR_EL1_AARCH64_TG1_INVALID);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_16KB == ARMV8_TCR_EL1_AARCH64_TG1_16KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_4KB == ARMV8_TCR_EL1_AARCH64_TG1_4KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB == ARMV8_TCR_EL1_AARCH64_TG1_64KB);

    pWalk->GCPtr = GCPtr;

    if RT_CONSTEXPR_IF(   a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID
                       && a_InitialLookupLvl != PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID)
    {
        /* Per-granule geometry, all selected at compile time below. */
        uint64_t fLookupMaskFull;      /* Table index mask for all levels after the initial one. */
        RTGCPTR offPageMask;           /* Byte offset mask within a level 3 page. */

        RTGCPTR offLvl1BlockMask;      /* Byte offset mask within a level 1 block; 0 when unsupported. */
        RTGCPTR offLvl2BlockMask;      /* Byte offset mask within a level 2 block. */

        uint64_t fNextTableOrPageMask; /* Extracts the next table/page physical address from a descriptor. */
        uint8_t cLvl0Shift;            /* VA shift for the level 0 table index (0 when no level 0). */
        uint8_t cLvl1Shift;
        uint8_t cLvl2Shift;
        uint8_t cLvl3Shift;

        RTGCPHYS fGCPhysLvl1BlockBase; /* Extracts the level 1 block base address. */
        RTGCPHYS fGCPhysLvl2BlockBase; /* Extracts the level 2 block base address. */

        /** @todo This needs to go into defines in armv8.h if final. */
        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
        {
            fLookupMaskFull = RT_BIT_64(9) - 1;
            offLvl1BlockMask = (RTGCPTR)(_1G - 1);
            offLvl2BlockMask = (RTGCPTR)(_2M - 1);
            offPageMask = (RTGCPTR)(_4K - 1);
            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
            cLvl0Shift = 39;
            cLvl1Shift = 30;
            cLvl2Shift = 21;
            cLvl3Shift = 12;
            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            fLookupMaskFull = RT_BIT_64(11) - 1;
            offLvl1BlockMask = 0; /** @todo TCR_EL1.DS support. */
            offLvl2BlockMask = (RTGCPTR)(_32M - 1);
            offPageMask = (RTGCPTR)(_16K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
            cLvl0Shift = 47;
            cLvl1Shift = 36;
            cLvl2Shift = 25;
            cLvl3Shift = 14;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            Assert(a_InitialLookupLvl > 0);

            fLookupMaskFull = RT_BIT_64(13) - 1;
            offLvl1BlockMask = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
            offLvl2BlockMask = (RTGCPTR)(_512M - 1);
            offPageMask = (RTGCPTR)(_64K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
            cLvl0Shift = 0; /* No Level 0 with 64KiB granules. */
            cLvl1Shift = 42;
            cLvl2Shift = 29;
            cLvl3Shift = 16;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
        }

        /* Get the initial lookup mask. */
        uint8_t const bEl = (fFlags & PGMQPAGE_F_USER_MODE) ? 0 : 1; /** @todo EL2 support */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        PARMV8VMSA64DESC paDesc = NULL;
        ARMV8VMSA64DESC Desc;
        int rc;
        /* NOTE(review): uses '==' while the later levels use '<='; an initial
           lookup level of MINUS_ONE would skip level 0 - presumably level -1
           (52-bit IA) is not supported yet, confirm before relying on it. */
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO)
        {
            Assert(cLvl0Shift != 0);
            uint8_t const uLvl = 0;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            /* Level 0 descriptors must be table descriptors. */
            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE)
        {
            uint8_t const uLvl = 1;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Level 1 block descriptors are only valid for granules that support them. */
                if (offLvl1BlockMask != 0)
                {
                    /* Block descriptor. */
                    pWalk->fInfo = PGM_WALKINFO_GIGANTIC_PAGE;
                    pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
                    return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
                }
                else
                    return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO)
        {
            uint8_t const uLvl = 2;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor. */
                pWalk->fInfo = PGM_WALKINFO_BIG_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
                return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        AssertCompile(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE);
        uint8_t const uLvl = 3;

        /* Next level. */
        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */

        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
        return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
    }
    else
        AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
}
809
810
811template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
812static PGM_CTX_DECL(int) PGM_CTX(pgm,GstModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
813{
814 /** @todo Ignore for now. */
815 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
816 return VINF_SUCCESS;
817}
818
819
820template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
821static PGM_CTX_DECL(int) PGM_CTX(pgm,GstWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
822{
823 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
824 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>(pVCpu, GCPtr, pWalk, pGstWalk);
825}
826
827
828template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
829static PGM_CTX_DECL(int) PGM_CTX(pgm,GstEnter)(PVMCPUCC pVCpu)
830{
831 /* Nothing to do for now. */
832 RT_NOREF(pVCpu);
833 return VINF_SUCCESS;
834}
835
836
837template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
838static PGM_CTX_DECL(int) PGM_CTX(pgm,GstExit)(PVMCPUCC pVCpu)
839{
840 /* Nothing to do for now. */
841 RT_NOREF(pVCpu);
842 return VINF_SUCCESS;
843}
844
845
846/**
847 * Guest mode data array.
848 */
849PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] =
850{
851 { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */
852 {
853 PGM_TYPE_NONE,
854 PGM_CTX(pgm,GstNoneGetPage),
855 PGM_CTX(pgm,GstNoneQueryPageFast),
856 PGM_CTX(pgm,GstNoneModifyPage),
857 PGM_CTX(pgm,GstNoneWalk),
858 PGM_CTX(pgm,GstNoneEnter),
859 PGM_CTX(pgm,GstNoneExit),
860 },
861
862#define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
863 (2 + ( (a_f52BitOa ? RT_BIT_32(8) : 0) \
864 | (a_fEpd ? RT_BIT_32(7) : 0) \
865 | (a_fTbi ? RT_BIT_32(6) : 0) \
866 | (a_GranuleSz << 4) \
867 | (a_InitialLookupLvl << 1) \
868 | (a_fTtbr0 ? RT_BIT_32(0) : 0) ))
869
870#define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
871 { \
872 PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
873 PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
874 PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
875 PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
876 PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
877 PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
878 PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa> \
879 }
880
881#define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
882 PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
883 PGM_MODE_CREATE_EX(true, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa)
884
885#define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
886 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_MINUS_ONE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
887 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
888 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
889 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
890 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
891 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), /* Filler for 3 bit lookup level */ \
892 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), /* Filler for 3 bit lookup level */ \
893 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ) /* Filler for 3 bit lookup level */
894
895#define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd, a_f52BitOa) \
896 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd, a_f52BitOa), \
897 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB, a_fTbi, a_fEpd, a_f52BitOa), \
898 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB, a_fTbi, a_fEpd, a_f52BitOa), \
899 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB, a_fTbi, a_fEpd, a_f52BitOa)
900
901#define PGM_MODE_CREATE_TBI(a_fEpd, a_f52BitOa) \
902 PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd, a_f52BitOa), \
903 PGM_MODE_CREATE_GRANULE_SZ(true, a_fEpd, a_f52BitOa)
904
905#define PGM_MODE_CREATE_EPD(a_f52BitOa) \
906 PGM_MODE_CREATE_TBI(false, a_f52BitOa), \
907 PGM_MODE_CREATE_TBI(true, a_f52BitOa)
908
909 /* Recursive expansion for the win, this will blow up to 512 entries covering all possible modes. */
910 PGM_MODE_CREATE_EPD(false),
911 PGM_MODE_CREATE_EPD(true)
912
913#undef PGM_MODE_CREATE_EPD
914#undef PGM_MODE_CREATE_TBI
915#undef PGM_MODE_CREATE_GRANULE_SZ
916#undef PGM_MODE_CREATE_LOOKUP_LVL
917#undef PGM_MODE_CREATE_TTBR
918#undef PGM_MODE_CREATE_EX
919};
920
921
922template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0>
923DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask)
924{
925 uintptr_t idxNewGst = 0;
926
927 /*
928 * MMU enabled at all?
929 * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same
930 * for all exception levels.
931 */
932 if (u64RegSctlr & ARMV8_SCTLR_EL1_M)
933 {
934 uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f;
935 uint64_t u64Tg = (u64RegTcr >> a_offTg) & 0x3;
936 bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi));
937 bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd));
938
939 /*
940 * From the ARM reference manual regarding granule size choices:
941 *
942 * If the value is programmed to either a reserved value or a size that has not been implemented, then
943 * the hardware will treat the field as if it has been programmed to an IMPLEMENTATION DEFINED
944 * choice of the sizes that has been implemented for all purposes other than the value read back from
945 * this register.
946 *
947 * We always fall back on the 4KiB granule size in that case.
948 */
949 /** @todo Can this be made table driven? */
950 uint64_t uLookupLvl;
951 if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
952 {
953 if (u64Tsz <= 16)
954 {
955 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO;
956 *pfInitialLookupMask = 0x1;
957 }
958 else if (u64Tsz >= 17 && u64Tsz <= 27)
959 {
960 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
961 *pfInitialLookupMask = RT_BIT_64(28 - u64Tsz + 1) - 1;
962 }
963 else if (u64Tsz >= 28 && u64Tsz <= 38)
964 {
965 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
966 *pfInitialLookupMask = RT_BIT_64(38 - u64Tsz + 1) - 1;
967 }
968 else /* if (u64Tsz == 39) */
969 {
970 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE;
971 *pfInitialLookupMask = 0x1;
972 }
973 }
974 else if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
975 {
976 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 21)
977 {
978 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
979 *pfInitialLookupMask = RT_BIT_64(21 - u64Tsz + 1) - 1;
980 }
981 else if (u64Tsz >= 22 && u64Tsz <= 34)
982 {
983 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
984 *pfInitialLookupMask = RT_BIT_64(34 - u64Tsz + 1) - 1;
985 }
986 else /*if (u64Tsz >= 35 && u64Tsz <= 39)*/
987 {
988 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE;
989 if (u64Tsz <= 39)
990 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
991 else
992 *pfInitialLookupMask = 0x1;
993 }
994 }
995 else /* if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_4KB) */
996 {
997 /*
998 * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md
999 * For all translation stages
1000 * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:
1001 * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.
1002 * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.
1003 *
1004 * For a stage 1 translation
1005 * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:
1006 * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.
1007 * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.
1008 *
1009 * We currently choose the former for both.
1010 */
1011 if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24)
1012 {
1013 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO;
1014 if (u64Tsz >= 16)
1015 *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1;
1016 else
1017 *pfInitialLookupMask = RT_BIT_64(9) - 1;
1018 }
1019 else if (u64Tsz >= 25 && u64Tsz <= 33)
1020 {
1021 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
1022 *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1;
1023 }
1024 else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/
1025 {
1026 uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
1027 if (u64Tsz <= 39)
1028 *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
1029 else
1030 *pfInitialLookupMask = 0x1;
1031 }
1032
1033 u64Tg = ARMV8_TCR_EL1_AARCH64_TG0_4KB;
1034 }
1035
1036 /* Build the index into the PGM mode callback table for the given config. */
1037 idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd, false /*f53BitOa*/);
1038 }
1039 else
1040 idxNewGst = PGM_TYPE_NONE;
1041
1042 return idxNewGst;
1043}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette