VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllThrdFuncsBltIn.cpp@ 100734

Last change on this file since 100734 was 100734, checked in by vboxsync, 22 months ago

VMM/IEM: Generate TBs for invalid instruction encodings as well. This involved special casing recompiler call generation for C instruction implementation functions that don't take any extra arguments, so that we can catch all the deeply hidden IEMOP_RAISE_INVALID_OPCODE_RET invocations and similar. Also had to clean up hacky decoding of effective address related opcode bytes for undefined opcodes, introducing IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(a_bRm) to hide the ugliness. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 23.8 KB
Line 
1/* $Id: IEMAllThrdFuncsBltIn.cpp 100734 2023-07-29 02:04:22Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, Built-in Threaded Functions.
4 *
5 * This is separate from IEMThreadedFunctions.cpp because it doesn't work
6 * with IEM_WITH_OPAQUE_DECODER_STATE defined.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/*********************************************************************************************************************************
33* Header Files *
34*********************************************************************************************************************************/
35#define LOG_GROUP LOG_GROUP_IEM_RE_THREADED
36#define VMCPU_INCL_CPUM_GST_CTX
37#include <VBox/vmm/iem.h>
38#include <VBox/vmm/cpum.h>
39#include <VBox/vmm/apic.h>
40#include <VBox/vmm/pdm.h>
41#include <VBox/vmm/pgm.h>
42#include <VBox/vmm/iom.h>
43#include <VBox/vmm/em.h>
44#include <VBox/vmm/hm.h>
45#include <VBox/vmm/nem.h>
46#include <VBox/vmm/gim.h>
47#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
48# include <VBox/vmm/em.h>
49# include <VBox/vmm/hm_svm.h>
50#endif
51#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
52# include <VBox/vmm/hmvmxinline.h>
53#endif
54#include <VBox/vmm/tm.h>
55#include <VBox/vmm/dbgf.h>
56#include <VBox/vmm/dbgftrace.h>
57#include "IEMInternal.h"
58#include <VBox/vmm/vmcc.h>
59#include <VBox/log.h>
60#include <VBox/err.h>
61#include <VBox/param.h>
62#include <VBox/dis.h>
63#include <VBox/disopcode-x86-amd64.h>
64#include <iprt/asm-math.h>
65#include <iprt/assert.h>
66#include <iprt/string.h>
67#include <iprt/x86.h>
68
69#include "IEMInline.h"
70
71
72
/**
 * Common worker for the BODY_* check macros: hands the translation block
 * currently being executed to iemThreadedTbObsolete() so it gets retired.
 *
 * @returns VINF_IEM_REEXEC_MODE_CHANGED, making the caller leave TB execution.
 * @param   pVCpu   The cross context virtual CPU structure of the calling EMT.
 *
 * @note    The identifier is missing a 'd' ("iemThreade") - rename together
 *          with all macro call sites if this is ever cleaned up.
 */
static VBOXSTRICTRC iemThreadeFuncWorkerObsoleteTb(PVMCPUCC pVCpu)
{
    /* Retire the TB we are currently executing. */
    iemThreadedTbObsolete(pVCpu, pVCpu->iem.s.pCurTbR3);
    return VINF_IEM_REEXEC_MODE_CHANGED; /** @todo different status code... */
}
78
79
80/**
81 * Built-in function that calls a C-implemention function taking zero arguments.
82 */
83IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_DeferToCImpl0,
84 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
85{
86 PFNIEMCIMPL0 const pfnCImpl = (PFNIEMCIMPL0)(uintptr_t)uParam0;
87 uint8_t const cbInstr = (uint8_t)uParam1;
88 RT_NOREF(uParam2);
89 return pfnCImpl(pVCpu, cbInstr);
90}
91
92
93/**
94 * Built-in function that compares the fExec mask against uParam0.
95 */
96IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckMode,
97 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
98{
99 uint32_t const fExpectedExec = (uint32_t)uParam0;
100 if (pVCpu->iem.s.fExec == fExpectedExec)
101 return VINF_SUCCESS;
102 LogFlow(("Mode changed at %04x:%08RX64: %#x -> %#x (xor: %#x)\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip,
103 fExpectedExec, pVCpu->iem.s.fExec, fExpectedExec ^ pVCpu->iem.s.fExec));
104 RT_NOREF(uParam1, uParam2);
105 return VINF_IEM_REEXEC_MODE_CHANGED;
106}
107
108
109DECL_FORCE_INLINE(RTGCPHYS) iemTbGetRangePhysPageAddr(PCIEMTB pTb, uint8_t idxRange)
110{
111 Assert(idxRange < RT_MIN(pTb->cRanges, RT_ELEMENTS(pTb->aRanges)));
112 uint8_t const idxPage = pTb->aRanges[idxRange].idxPhysPage;
113 Assert(idxPage <= RT_ELEMENTS(pTb->aGCPhysPages));
114 if (idxPage == 0)
115 return pTb->GCPhysPc & ~(RTGCPHYS)GUEST_PAGE_OFFSET_MASK;
116 Assert(!(pTb->aGCPhysPages[idxPage - 1] & GUEST_PAGE_OFFSET_MASK));
117 return pTb->aGCPhysPages[idxPage - 1];
118}
119
120
/**
 * Macro that implements the 16/32-bit CS.LIM check, as this is done by a
 * number of functions.
 *
 * On the unlikely path it executes a @c return of
 * iemRaiseGeneralProtectionFault0 (\#GP(0)), so it must be expanded directly
 * inside a function returning VBOXSTRICTRC.
 *
 * NOTE(review): the comparison reads the caller's local @c cbInstr rather
 * than the @a a_cbInstr macro parameter (only the Log7 uses the parameter),
 * so every caller must declare a local named exactly @c cbInstr - confirm
 * whether this macro-hygiene shortcut is intentional.
 * NOTE(review): the unsigned subtraction eip - u32Limit relies on wrap-around
 * to make the in-bounds case large; verify the intended semantics for
 * instructions straddling the limit before touching this.
 */
#define BODY_CHECK_CS_LIM(a_cbInstr) do { \
        if (RT_LIKELY(pVCpu->cpum.GstCtx.eip - pVCpu->cpum.GstCtx.cs.u32Limit >= cbInstr)) \
        { /* likely */ } \
        else \
        { \
            Log7(("EIP out of bounds at %04x:%08RX32 LB %u - CS.LIM=%#RX32\n", \
                  pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.eip, (a_cbInstr), pVCpu->cpum.GstCtx.cs.u32Limit)); \
            return iemRaiseGeneralProtectionFault0(pVCpu); \
        } \
    } while(0)
135
/**
 * Macro that implements opcode (re-)checking.
 *
 * Compares the remaining opcode bytes of range @a a_idxRange (from
 * @a a_offRange to the end of the range) in the current instruction buffer
 * against the bytes the TB was compiled from (pabOpcodes).  On a mismatch the
 * macro executes a @c return via iemThreadeFuncWorkerObsoleteTb, retiring the
 * TB.
 *
 * @param   a_pTb       The translation block being executed.
 * @param   a_idxRange  Index into a_pTb->aRanges of the range to check.
 * @param   a_offRange  Byte offset into that range to start comparing at.
 * @param   a_cbInstr   Current instruction length (logging only).
 *
 * @note    Implicitly uses the caller's @c pVCpu local.
 */
#define BODY_CHECK_OPCODES(a_pTb, a_idxRange, a_offRange, a_cbInstr) do { \
        Assert((a_idxRange) < (a_pTb)->cRanges && (a_pTb)->cRanges <= RT_ELEMENTS((a_pTb)->aRanges)); \
        Assert((a_offRange) < (a_pTb)->aRanges[(a_idxRange)].cbOpcodes); \
        /* We can use pbInstrBuf here as it will be updated when branching (and prior to executing a TB). */ \
        if (RT_LIKELY(memcmp(&pVCpu->iem.s.pbInstrBuf[(a_pTb)->aRanges[(a_idxRange)].offPhysPage + (a_offRange)], \
                             &(a_pTb)->pabOpcodes[ (a_pTb)->aRanges[(a_idxRange)].offOpcodes + (a_offRange)], \
                             (a_pTb)->aRanges[(a_idxRange)].cbOpcodes - (a_offRange)) == 0)) \
        { /* likely */ } \
        else \
        { \
            Log7(("TB obsolete: %p at %04x:%08RX64 LB %u; range %u, off %#x LB %#x + %#x; #%u\n", (a_pTb), \
                  pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), (a_idxRange), \
                  (a_pTb)->aRanges[(a_idxRange)].offOpcodes, (a_pTb)->aRanges[(a_idxRange)].cbOpcodes, (a_offRange), __LINE__)); \
            RT_NOREF(a_cbInstr); \
            return iemThreadeFuncWorkerObsoleteTb(pVCpu); \
        } \
    } while(0)
156
/**
 * Macro that implements TLB loading and updating pbInstrBuf updating for an
 * instruction crossing into a new page.
 *
 * Invalidates pbInstrBuf and calls iemOpcodeFetchBytesJmp to translate the
 * new page, then verifies the resulting physical address matches the TB
 * range; otherwise retires the TB via iemThreadeFuncWorkerObsoleteTb.
 *
 * This may long jump if we're raising a \#PF, \#GP or similar trouble.
 *
 * @param   a_pTb       The translation block being executed.
 * @param   a_offInstr  Number of instruction bytes on the previous page (the
 *                      new page starts GUEST_PAGE_SIZE - a_offInstr into the
 *                      instruction).
 * @param   a_idxRange  Index of the TB range covering the new page.
 * @param   a_cbInstr   Current instruction length (logging only).
 *
 * @note    Implicitly uses the caller's @c pVCpu local.
 */
#define BODY_LOAD_TLB_FOR_NEW_PAGE(a_pTb, a_offInstr, a_idxRange, a_cbInstr) do { \
        pVCpu->iem.s.pbInstrBuf       = NULL; \
        pVCpu->iem.s.offCurInstrStart = GUEST_PAGE_SIZE - (a_offInstr); \
        pVCpu->iem.s.offInstrNextByte = GUEST_PAGE_SIZE; \
        iemOpcodeFetchBytesJmp(pVCpu, 0, NULL); \
        \
        RTGCPHYS const GCPhysNewPage = iemTbGetRangePhysPageAddr(a_pTb, a_idxRange); \
        if (RT_LIKELY(   pVCpu->iem.s.GCPhysInstrBuf == GCPhysNewPage \
                      && pVCpu->iem.s.pbInstrBuf)) \
        { /* likely */ } \
        else \
        { \
            Log7(("TB obsolete: %p at %04x:%08RX64 LB %u; crossing at %#x; GCPhys=%RGp expected %RGp, pbInstrBuf=%p - #%u\n", \
                  (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), (a_offInstr), \
                  pVCpu->iem.s.GCPhysInstrBuf, GCPhysNewPage, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
            RT_NOREF(a_cbInstr); \
            return iemThreadeFuncWorkerObsoleteTb(pVCpu); \
        } \
    } while(0)
182
/**
 * Macro that implements TLB loading and updating pbInstrBuf updating when
 * branching or when crossing a page on an instruction boundary.
 *
 * This differs from BODY_LOAD_TLB_FOR_NEW_PAGE in that it will first check if
 * it is an inter-page branch and also check the page offset.
 *
 * Fast path: the new RIP still falls inside the current instruction buffer
 * and its physical address matches the TB range.  Slow path: pbInstrBuf is
 * invalidated and iemOpcodeFetchBytesJmp re-translates, after which the
 * physical address is re-checked.  A plain offset mismatch returns
 * VINF_IEM_REEXEC_MODE_CHANGED (TB doesn't match the branch target); a
 * physical address mismatch retires the TB via
 * iemThreadeFuncWorkerObsoleteTb.
 *
 * This may long jump if we're raising a \#PF, \#GP or similar trouble.
 *
 * @note    Implicitly uses the caller's @c pVCpu local, and - in two places -
 *          the literal local @c pTb in addition to the @a a_pTb parameter, so
 *          callers must have a local named exactly @c pTb.
 * @note    Declares locals uPc/off/offNew/GCPhysRangePageWithOffset, so it can
 *          only be expanded once per scope alongside macros using those names.
 */
#define BODY_LOAD_TLB_AFTER_BRANCH(a_pTb, a_idxRange, a_cbInstr) do { \
        /* Is RIP within the current code page? */ \
        Assert(pVCpu->cpum.GstCtx.cs.u64Base == 0 || !IEM_IS_64BIT_CODE(pVCpu)); \
        uint64_t const uPc = pVCpu->cpum.GstCtx.rip + pVCpu->cpum.GstCtx.cs.u64Base; \
        uint64_t const off = uPc - pVCpu->iem.s.uInstrBufPc; \
        if (off < pVCpu->iem.s.cbInstrBufTotal) \
        { \
            Assert(!(pVCpu->iem.s.GCPhysInstrBuf & GUEST_PAGE_OFFSET_MASK)); \
            Assert(pVCpu->iem.s.pbInstrBuf); \
            RTGCPHYS const GCPhysRangePageWithOffset = iemTbGetRangePhysPageAddr(a_pTb, a_idxRange) \
                                                     | pTb->aRanges[(a_idxRange)].offPhysPage; \
            if (GCPhysRangePageWithOffset == pVCpu->iem.s.GCPhysInstrBuf + off) \
            { /* we're good */ } \
            else if (pTb->aRanges[(a_idxRange)].offPhysPage != off) \
            { \
                Log7(("TB jmp miss: %p at %04x:%08RX64 LB %u; branching/1; GCPhysWithOffset=%RGp expected %RGp, pbInstrBuf=%p - #%u\n", \
                      (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), \
                      pVCpu->iem.s.GCPhysInstrBuf + off, GCPhysRangePageWithOffset, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
                RT_NOREF(a_cbInstr); \
                return VINF_IEM_REEXEC_MODE_CHANGED; /** @todo new status code? */ \
            } \
            else \
            { \
                Log7(("TB obsolete: %p at %04x:%08RX64 LB %u; branching/1; GCPhysWithOffset=%RGp expected %RGp, pbInstrBuf=%p - #%u\n", \
                      (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), \
                      pVCpu->iem.s.GCPhysInstrBuf + off, GCPhysRangePageWithOffset, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
                RT_NOREF(a_cbInstr); \
                return iemThreadeFuncWorkerObsoleteTb(pVCpu); \
            } \
        } \
        else \
        { \
            /* Must translate new RIP. */ \
            pVCpu->iem.s.pbInstrBuf       = NULL; \
            pVCpu->iem.s.offCurInstrStart = 0; \
            pVCpu->iem.s.offInstrNextByte = 0; \
            iemOpcodeFetchBytesJmp(pVCpu, 0, NULL); \
            Assert(!(pVCpu->iem.s.GCPhysInstrBuf & GUEST_PAGE_OFFSET_MASK) || !pVCpu->iem.s.pbInstrBuf); \
            \
            RTGCPHYS const GCPhysRangePageWithOffset = iemTbGetRangePhysPageAddr(a_pTb, a_idxRange) \
                                                     | pTb->aRanges[(a_idxRange)].offPhysPage; \
            uint64_t const offNew = uPc - pVCpu->iem.s.uInstrBufPc; \
            if (   GCPhysRangePageWithOffset == pVCpu->iem.s.GCPhysInstrBuf + offNew \
                && pVCpu->iem.s.pbInstrBuf) \
            { /* likely */ } \
            else if (   pTb->aRanges[(a_idxRange)].offPhysPage != offNew \
                     && pVCpu->iem.s.pbInstrBuf) \
            { \
                Log7(("TB jmp miss: %p at %04x:%08RX64 LB %u; branching/2; GCPhysWithOffset=%RGp expected %RGp, pbInstrBuf=%p - #%u\n", \
                      (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), \
                      pVCpu->iem.s.GCPhysInstrBuf + offNew, GCPhysRangePageWithOffset, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
                RT_NOREF(a_cbInstr); \
                return VINF_IEM_REEXEC_MODE_CHANGED; /** @todo new status code? */ \
            } \
            else \
            { \
                Log7(("TB obsolete: %p at %04x:%08RX64 LB %u; branching/2; GCPhysWithOffset=%RGp expected %RGp, pbInstrBuf=%p - #%u\n", \
                      (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), \
                      pVCpu->iem.s.GCPhysInstrBuf + offNew, GCPhysRangePageWithOffset, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
                RT_NOREF(a_cbInstr); \
                return iemThreadeFuncWorkerObsoleteTb(pVCpu); \
            } \
        } \
    } while(0)
256
/**
 * Macro that implements PC check after a conditional branch.
 *
 * Verifies that the (linear) PC still maps to the physical address recorded
 * for TB range @a a_idxRange and lies within the current instruction buffer.
 * On mismatch it executes a @c return of VINF_IEM_REEXEC_MODE_CHANGED (the
 * branch went somewhere this TB doesn't cover).
 *
 * @note    Implicitly uses the caller's @c pVCpu local, and the literal local
 *          @c pTb in addition to the @a a_pTb parameter - callers must have a
 *          local named exactly @c pTb.
 * @note    Declares locals uPc/off/GCPhysRangePageWithOffset; expand at most
 *          once per scope alongside macros using those names.
 */
#define BODY_CHECK_PC_AFTER_BRANCH(a_pTb, a_idxRange, a_cbInstr) do { \
        /* Is RIP within the current code page? */ \
        Assert(pVCpu->cpum.GstCtx.cs.u64Base == 0 || !IEM_IS_64BIT_CODE(pVCpu)); \
        uint64_t const uPc = pVCpu->cpum.GstCtx.rip + pVCpu->cpum.GstCtx.cs.u64Base; \
        uint64_t const off = uPc - pVCpu->iem.s.uInstrBufPc; \
        Assert(!(pVCpu->iem.s.GCPhysInstrBuf & GUEST_PAGE_OFFSET_MASK)); \
        RTGCPHYS const GCPhysRangePageWithOffset = iemTbGetRangePhysPageAddr(a_pTb, a_idxRange) \
                                                 | pTb->aRanges[(a_idxRange)].offPhysPage; \
        if (   GCPhysRangePageWithOffset == pVCpu->iem.s.GCPhysInstrBuf + off \
            && off < pVCpu->iem.s.cbInstrBufTotal) \
        { /* we're good */ } \
        else \
        { \
            Log7(("TB jmp miss: %p at %04x:%08RX64 LB %u; GCPhysWithOffset=%RGp hoped for %RGp, pbInstrBuf=%p - #%u\n", \
                  (a_pTb), pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, (a_cbInstr), \
                  pVCpu->iem.s.GCPhysInstrBuf + off, GCPhysRangePageWithOffset, pVCpu->iem.s.pbInstrBuf, __LINE__)); \
            RT_NOREF(a_cbInstr); \
            return VINF_IEM_REEXEC_MODE_CHANGED; /** @todo new status code? */ \
        } \
    } while(0)
280
281
282/**
283 * Built-in function that checks the EIP/IP + uParam0 is within CS.LIM,
284 * raising a \#GP(0) if this isn't the case.
285 */
286IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLim,
287 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
288{
289 uint32_t const cbInstr = (uint32_t)uParam0;
290 RT_NOREF(uParam1, uParam2);
291 BODY_CHECK_CS_LIM(cbInstr);
292 return VINF_SUCCESS;
293}
294
295
296/**
297 * Built-in function for re-checking opcodes and CS.LIM after an instruction
298 * that may have modified them.
299 */
300IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndOpcodes,
301 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
302{
303 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
304 uint32_t const cbInstr = (uint32_t)uParam0;
305 uint32_t const idxRange = (uint32_t)uParam1;
306 uint32_t const offRange = (uint32_t)uParam2;
307 BODY_CHECK_CS_LIM(cbInstr);
308 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
309 return VINF_SUCCESS;
310}
311
312
313/**
314 * Built-in function for re-checking opcodes after an instruction that may have
315 * modified them.
316 */
317IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckOpcodes,
318 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
319{
320 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
321 uint32_t const cbInstr = (uint32_t)uParam0;
322 uint32_t const idxRange = (uint32_t)uParam1;
323 uint32_t const offRange = (uint32_t)uParam2;
324 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
325 return VINF_SUCCESS;
326}
327
328
329/*
330 * Post-branching checkers.
331 */
332
333/**
334 * Built-in function for checking CS.LIM, checking the PC and checking opcodes
335 * after conditional branching within the same page.
336 *
337 * @see iemThreadedFunc_BltIn_CheckPcAndOpcodes
338 */
339IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndPcAndOpcodes,
340 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
341{
342 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
343 uint32_t const cbInstr = (uint32_t)uParam0;
344 uint32_t const idxRange = (uint32_t)uParam1;
345 uint32_t const offRange = (uint32_t)uParam2;
346 //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
347 BODY_CHECK_CS_LIM(cbInstr);
348 BODY_CHECK_PC_AFTER_BRANCH(pTb, idxRange, cbInstr);
349 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
350 //LogFunc(("okay\n"));
351 return VINF_SUCCESS;
352}
353
354
355/**
356 * Built-in function for checking the PC and checking opcodes after conditional
357 * branching within the same page.
358 *
359 * @see iemThreadedFunc_BltIn_CheckCsLimAndPcAndOpcodes
360 */
361IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckPcAndOpcodes,
362 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
363{
364 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
365 uint32_t const cbInstr = (uint32_t)uParam0;
366 uint32_t const idxRange = (uint32_t)uParam1;
367 uint32_t const offRange = (uint32_t)uParam2;
368 //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
369 BODY_CHECK_PC_AFTER_BRANCH(pTb, idxRange, cbInstr);
370 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
371 //LogFunc(("okay\n"));
372 return VINF_SUCCESS;
373}
374
375
376/**
377 * Built-in function for checking CS.LIM, loading TLB and checking opcodes when
378 * transitioning to a different code page.
379 *
380 * The code page transition can either be natural over onto the next page (with
381 * the instruction starting at page offset zero) or by means of branching.
382 *
383 * @see iemThreadedFunc_BltIn_CheckOpcodesLoadingTlb
384 */
385IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndOpcodesLoadingTlb,
386 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
387{
388 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
389 uint32_t const cbInstr = (uint32_t)uParam0;
390 uint32_t const idxRange = (uint32_t)uParam1;
391 uint32_t const offRange = (uint32_t)uParam2;
392 //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
393 BODY_CHECK_CS_LIM(cbInstr);
394 BODY_LOAD_TLB_AFTER_BRANCH(pTb, idxRange, cbInstr);
395 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
396 //LogFunc(("okay\n"));
397 return VINF_SUCCESS;
398}
399
400
401/**
402 * Built-in function for loading TLB and checking opcodes when transitioning to
403 * a different code page.
404 *
405 * The code page transition can either be natural over onto the next page (with
406 * the instruction starting at page offset zero) or by means of branching.
407 *
408 * @see iemThreadedFunc_BltIn_CheckCsLimAndOpcodesLoadingTlb
409 */
410IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckOpcodesLoadingTlb,
411 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
412{
413 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
414 uint32_t const cbInstr = (uint32_t)uParam0;
415 uint32_t const idxRange = (uint32_t)uParam1;
416 uint32_t const offRange = (uint32_t)uParam2;
417 //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
418 BODY_LOAD_TLB_AFTER_BRANCH(pTb, idxRange, cbInstr);
419 BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
420 //LogFunc(("okay\n"));
421 return VINF_SUCCESS;
422}
423
424
425
426/*
427 * Natural page crossing checkers.
428 */
429
430/**
431 * Built-in function for checking CS.LIM, loading TLB and checking opcodes on
432 * both pages when transitioning to a different code page.
433 *
434 * This is used when the previous instruction requires revalidation of opcodes
435 * bytes and the current instruction stries a page boundrary with opcode bytes
436 * in both the old and new page.
437 *
438 * @see iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlb
439 */
440IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndOpcodesAcrossPageLoadingTlb,
441 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
442{
443 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
444 uint32_t const cbInstr = (uint32_t)uParam0;
445 uint32_t const cbStartPage = (uint32_t)(uParam0 >> 32);
446 uint32_t const idxRange1 = (uint32_t)uParam1;
447 uint32_t const offRange1 = (uint32_t)uParam2;
448 uint32_t const idxRange2 = idxRange1 + 1;
449 BODY_CHECK_CS_LIM(cbInstr);
450 BODY_CHECK_OPCODES(pTb, idxRange1, offRange1, cbInstr);
451 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);
452 BODY_CHECK_OPCODES(pTb, idxRange2, 0, cbInstr);
453 return VINF_SUCCESS;
454}
455
456
457/**
458 * Built-in function for loading TLB and checking opcodes on both pages when
459 * transitioning to a different code page.
460 *
461 * This is used when the previous instruction requires revalidation of opcodes
462 * bytes and the current instruction stries a page boundrary with opcode bytes
463 * in both the old and new page.
464 *
465 * @see iemThreadedFunc_BltIn_CheckCsLimAndOpcodesAcrossPageLoadingTlb
466 */
467IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlb,
468 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
469{
470 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
471 uint32_t const cbInstr = (uint32_t)uParam0;
472 uint32_t const cbStartPage = (uint32_t)(uParam0 >> 32);
473 uint32_t const idxRange1 = (uint32_t)uParam1;
474 uint32_t const offRange1 = (uint32_t)uParam2;
475 uint32_t const idxRange2 = idxRange1 + 1;
476 BODY_CHECK_OPCODES(pTb, idxRange1, offRange1, cbInstr);
477 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);
478 BODY_CHECK_OPCODES(pTb, idxRange2, 0, cbInstr);
479 return VINF_SUCCESS;
480}
481
482
483/**
484 * Built-in function for checking CS.LIM, loading TLB and checking opcodes when
485 * advancing naturally to a different code page.
486 *
487 * Only opcodes on the new page is checked.
488 *
489 * @see iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlb
490 */
491IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNextPageLoadingTlb,
492 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
493{
494 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
495 uint32_t const cbInstr = (uint32_t)uParam0;
496 uint32_t const cbStartPage = (uint32_t)(uParam0 >> 32);
497 uint32_t const idxRange1 = (uint32_t)uParam1;
498 //uint32_t const offRange1 = (uint32_t)uParam2;
499 uint32_t const idxRange2 = idxRange1 + 1;
500 BODY_CHECK_CS_LIM(cbInstr);
501 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);
502 BODY_CHECK_OPCODES(pTb, idxRange2, 0, cbInstr);
503 RT_NOREF(uParam2);
504 return VINF_SUCCESS;
505}
506
507
508/**
509 * Built-in function for loading TLB and checking opcodes when advancing
510 * naturally to a different code page.
511 *
512 * Only opcodes on the new page is checked.
513 *
514 * @see iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNextPageLoadingTlb
515 */
516IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlb,
517 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
518{
519 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
520 uint32_t const cbInstr = (uint32_t)uParam0;
521 uint32_t const cbStartPage = (uint32_t)(uParam0 >> 32);
522 uint32_t const idxRange1 = (uint32_t)uParam1;
523 //uint32_t const offRange1 = (uint32_t)uParam2;
524 uint32_t const idxRange2 = idxRange1 + 1;
525 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);
526 BODY_CHECK_OPCODES(pTb, idxRange2, 0, cbInstr);
527 RT_NOREF(uParam2);
528 return VINF_SUCCESS;
529}
530
531
532/**
533 * Built-in function for checking CS.LIM, loading TLB and checking opcodes when
534 * advancing naturally to a different code page with first instr at byte 0.
535 *
536 * @see iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb
537 */
538IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNewPageLoadingTlb,
539 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
540{
541 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
542 uint32_t const cbInstr = (uint32_t)uParam0;
543 uint32_t const idxRange = (uint32_t)uParam1;
544 Assert(uParam2 == 0 /*offRange*/); RT_NOREF(uParam2);
545 BODY_CHECK_CS_LIM(cbInstr);
546 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, 0, idxRange, cbInstr);
547 Assert(pVCpu->iem.s.offCurInstrStart == 0);
548 BODY_CHECK_OPCODES(pTb, idxRange, 0, cbInstr);
549 return VINF_SUCCESS;
550}
551
552
553/**
554 * Built-in function for loading TLB and checking opcodes when advancing
555 * naturally to a different code page with first instr at byte 0.
556 *
557 * @see iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNewPageLoadingTlb
558 */
559IEM_DECL_IMPL_DEF(VBOXSTRICTRC, iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb,
560 (PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2))
561{
562 PCIEMTB const pTb = pVCpu->iem.s.pCurTbR3;
563 uint32_t const cbInstr = (uint32_t)uParam0;
564 uint32_t const idxRange = (uint32_t)uParam1;
565 Assert(uParam2 == 0 /*offRange*/); RT_NOREF(uParam2);
566 BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, 0, idxRange, cbInstr);
567 Assert(pVCpu->iem.s.offCurInstrStart == 0);
568 BODY_CHECK_OPCODES(pTb, idxRange, 0, cbInstr);
569 return VINF_SUCCESS;
570}
571
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette