VirtualBox

source: vbox/trunk/src/VBox/VMM/include/IEMOpHlp.h@ 99324

Last change on this file since 99324 was 99324, checked in by vboxsync, 2 years ago

VMM/IEM: Use IEMOP_HLP_DONE_VEX_DECODING_*() rather than IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT or IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT to check for the AVX and AVX2 CPUID bits, since the latter two are for runtime checks while the former is for the decoding stage. OTOH, the AVX CPUID check is unnecessary in the VexMap files, since the VEX prefix decoding already checks for it - but that can be optimized some other time. Fixed a number of AVX2/AVX mixups resulting from copy&paste or laziness. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 22.5 KB
Line 
1/* $Id: IEMOpHlp.h 99324 2023-04-06 23:34:00Z vboxsync $ */
2/** @file
3 * IEM - Interpreted Execution Manager - Opcode Helpers.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28#ifndef VMM_INCLUDED_SRC_include_IEMOpHlp_h
29#define VMM_INCLUDED_SRC_include_IEMOpHlp_h
30#ifndef RT_WITHOUT_PRAGMA_ONCE
31# pragma once
32#endif
33
34/** @name Common opcode decoders.
35 * @{
36 */
37void iemOpStubMsg2(PVMCPUCC pVCpu) RT_NOEXCEPT;
38
/**
 * Complains about a stub.
 *
 * Providing two versions of this macro, one for daily use and one for use when
 * working on IEM (flip the @c \#if to 0 to get the assert-and-panic variant).
 */
#if 0
# define IEMOP_BITCH_ABOUT_STUB() \
    do { \
        RTAssertMsg1(NULL, __LINE__, __FILE__, __FUNCTION__); \
        iemOpStubMsg2(pVCpu); \
        RTAssertPanic(); \
    } while (0)
#else
/* Wrapped in do/while(0) so the caller-supplied semicolon doesn't create an
   extra empty statement (the old form broke unbraced if/else bodies). */
# define IEMOP_BITCH_ABOUT_STUB() \
    do { Log(("Stub: %s (line %d)\n", __FUNCTION__, __LINE__)); } while (0)
#endif
55
/** Stubs an opcode.
 * Defines @a a_Name as an opcode decoder that just logs a stub message and
 * returns VERR_IEM_INSTR_NOT_IMPLEMENTED.  The trailing typedef exists only
 * to consume the semicolon at the macro invocation site. */
#define FNIEMOP_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        RT_NOREF_PV(pVCpu); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon
/** Stubs an opcode taking one extra argument.
 * Same as FNIEMOP_STUB but for decoders declared via FNIEMOP_DEF_1; the extra
 * argument @a a_Name0 (of type @a a_Type0) is marked unused. */
#define FNIEMOP_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon
/** Stubs an opcode which currently should raise \#UD.
 * Unlike FNIEMOP_STUB this logs and raises an invalid-opcode exception rather
 * than returning VERR_IEM_INSTR_NOT_IMPLEMENTED. */
#define FNIEMOP_UD_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        return IEMOP_RAISE_INVALID_OPCODE(); \
    } \
    typedef int ignore_semicolon
/** Stubs an opcode which currently should raise \#UD.
 * One-extra-argument variant of FNIEMOP_UD_STUB (see FNIEMOP_DEF_1). */
#define FNIEMOP_UD_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        return IEMOP_RAISE_INVALID_OPCODE(); \
    } \
    typedef int ignore_semicolon
97/** @} */
98
99
100/** @name Opcode Debug Helpers.
101 * @{
102 */
/** Increments the statistics counter @a a_Stats for the current instruction.
 * Picks the ring-3 (StatsR3) or raw/ring-0 (StatsRZ) counter set depending on
 * context; compiles to nothing when statistics are disabled. */
#ifdef VBOX_WITH_STATISTICS
# ifdef IN_RING3
# define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsR3.a_Stats += 1; } while (0)
# else
# define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsRZ.a_Stats += 1; } while (0)
# endif
#else
# define IEMOP_INC_STATS(a_Stats) do { } while (0)
#endif
112
#ifdef DEBUG
/** Bumps the instruction statistics counter and logs (Log4) the decoded
 * mnemonic together with CS:RIP, any lock prefix and the instruction count. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) \
    do { \
        IEMOP_INC_STATS(a_Stats); \
        Log4(("decode - %04x:%RGv %s%s [#%u]\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, \
              pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, pVCpu->iem.s.cInstructions)); \
    } while (0)

/** IEMOP_MNEMONIC for a no-operand instruction.  The (void) casts generate no
 * code; they only make the compiler verify that the IEMOPFORM_ and OP_ symbols
 * named by the arguments actually exist (catches typos in opcode tables). */
# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form);   \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** One-operand variant; additionally compile-time checks OP_PARM_<a_Op1>. */
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form);   \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Two-operand variant. */
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form);   \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Three-operand variant. */
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form);   \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Four-operand variant. */
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form);   \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)RT_CONCAT(OP_PARM_,a_Op4); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

#else
/* Non-DEBUG builds: only the statistics counter is touched; the mnemonic
   string and the form/operand symbol checks are compiled out. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) IEMOP_INC_STATS(a_Stats)

# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)

#endif
191
/** Convenience wrappers around the IEMOP_MNEMONICnEX macros.
 * They derive the statistics member name by concatenating the lowercase
 * mnemonic and operand tokens with underscores (e.g. add_Eb_Gb), and build the
 * human-readable mnemonic string via the preprocessor stringizing operator. */
#define IEMOP_MNEMONIC0(a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC0EX(a_Lower, \
                      #a_Lower, \
                      a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC1(a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC1EX(RT_CONCAT3(a_Lower,_,a_Op1), \
                      #a_Lower " " #a_Op1, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC2EX(RT_CONCAT5(a_Lower,_,a_Op1,_,a_Op2), \
                      #a_Lower " " #a_Op1 "," #a_Op2, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC3EX(RT_CONCAT7(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC4EX(RT_CONCAT9(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3,_,a_Op4), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3 "," #a_Op4, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints)
212
213/** @} */
214
215
216/** @name Opcode Helpers.
217 * @{
218 */
219
/** Raises \#UD when the configured target CPU is older than @a a_uMinCpu and
 * the condition @a a_fOnlyIf holds.  The ring-3 variant additionally triggers
 * a debugger stop (DBGFSTOP) before raising the exception, to ease analysis of
 * guests executing instructions the target CPU profile shouldn't have. */
#ifdef IN_RING3
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else \
        { \
            (void)DBGFSTOP(pVCpu->CTX_SUFF(pVM)); \
            return IEMOP_RAISE_INVALID_OPCODE(); \
        } \
    } while (0)
#else
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
#endif
237
/* The IEMOP_HLP_MIN_* family below compiles to a no-op whenever the
 * compile-time target CPU configuration (IEM_CFG_TARGET_CPU) already
 * guarantees the minimum; otherwise the runtime check in IEMOP_HLP_MIN_CPU
 * is emitted. */

/** The instruction requires a 186 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_186
# define IEMOP_HLP_MIN_186() do { } while (0)
#else
# define IEMOP_HLP_MIN_186() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_186, true)
#endif

/** The instruction requires a 286 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_286
# define IEMOP_HLP_MIN_286() do { } while (0)
#else
# define IEMOP_HLP_MIN_286() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_286, true)
#endif

/** The instruction requires a 386 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386() do { } while (0)
#else
# define IEMOP_HLP_MIN_386() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
#endif

/** The instruction requires a 386 or later if the given expression is true. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) do { } while (0)
#else
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, a_fOnlyIf)
#endif

/** The instruction requires a 486 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_486
# define IEMOP_HLP_MIN_486() do { } while (0)
#else
# define IEMOP_HLP_MIN_486() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_486, true)
#endif

/** The instruction requires a Pentium (586) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PENTIUM
# define IEMOP_HLP_MIN_586() do { } while (0)
#else
# define IEMOP_HLP_MIN_586() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PENTIUM, true)
#endif

/** The instruction requires a PentiumPro (686) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PPRO
# define IEMOP_HLP_MIN_686() do { } while (0)
#else
# define IEMOP_HLP_MIN_686() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PPRO, true)
#endif
286
287
/** The instruction raises an \#UD in real and V8086 mode. */
#define IEMOP_HLP_NO_REAL_OR_V86_MODE() \
    do \
    { \
        if (!IEM_IS_REAL_OR_V86_MODE(pVCpu)) { /* likely */ } \
        else return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
295
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** This instruction raises an \#UD in real and V8086 mode or when not using a
 * 64-bit code segment when in long mode (applicable to all VMX instructions
 * except VMCALL).
 *
 * On the failure paths the VMX diagnostic field is set (using the
 * @a a_InsDiagPrefix token pasted with the failure reason) before raising
 * \#UD, so the cause can be queried afterwards.
 */
# define IEMOP_HLP_VMX_INSTR(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (   !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
            && (   !IEM_IS_LONG_MODE(pVCpu) \
                || IEM_IS_64BIT_CODE(pVCpu))) \
        { /* likely */ } \
        else \
        { \
            if (IEM_IS_REAL_OR_V86_MODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_RealOrV86Mode; \
                Log5((a_szInstr ": Real or v8086 mode -> #UD\n")); \
                return IEMOP_RAISE_INVALID_OPCODE(); \
            } \
            if (IEM_IS_LONG_MODE(pVCpu) && !IEM_IS_64BIT_CODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_LongModeCS; \
                Log5((a_szInstr ": Long mode without 64-bit code segment -> #UD\n")); \
                return IEMOP_RAISE_INVALID_OPCODE(); \
            } \
        } \
    } while (0)

/** The instruction can only be executed in VMX operation (VMX root mode and
 * non-root mode).
 *
 * @note Update IEM_VMX_IN_VMX_OPERATION if changes are made here.
 */
# define IEMOP_HLP_IN_VMX_OPERATION(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (IEM_VMX_IS_ROOT_MODE(pVCpu)) { /* likely */ } \
        else \
        { \
            pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_VmxRoot; \
            Log5((a_szInstr ": Not in VMX operation (root mode) -> #UD\n")); \
            return IEMOP_RAISE_INVALID_OPCODE(); \
        } \
    } while (0)
#endif /* VBOX_WITH_NESTED_HWVIRT_VMX */
342
/** The instruction is not available in 64-bit mode, throw \#UD if we're in
 * 64-bit mode. */
#define IEMOP_HLP_NO_64BIT() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
351
/** The instruction is only available in 64-bit mode, throw \#UD if we're not in
 * 64-bit mode. */
#define IEMOP_HLP_ONLY_64BIT() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT) \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
360
/** The instruction defaults to 64-bit operand size if 64-bit mode. */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            iemRecalEffOpSize64Default(pVCpu); \
    } while (0)

/** The instruction defaults to 64-bit operand size if 64-bit mode and intel
 * CPUs ignore the operand size prefix completely (e.g. relative jumps). */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            iemRecalEffOpSize64DefaultAndIntelIgnoresOpSizePrefix(pVCpu); \
    } while (0)
377
/** The instruction has 64-bit operand size if 64-bit mode.
 * Forces both the default and the effective operand size to 64-bit. */
#define IEMOP_HLP_64BIT_OP_SIZE() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; \
    } while (0)
385
/** Only a REX prefix immediately preceding the first opcode byte takes
 * effect. This macro helps ensuring this as well as logging bad guest code.
 * Clears all REX state (prefix flags, uRexB/uRexIndex/uRexReg) and
 * recalculates the effective operand size.
 * NOTE(review): the log format uses %RX16 for rip, which looks like it only
 * prints the low 16 bits -- confirm whether %RX64 was intended. */
#define IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE(a_szPrf) \
    do \
    { \
        if (RT_UNLIKELY(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX)) \
        { \
            Log5((a_szPrf ": Overriding REX prefix at %RX16! fPrefixes=%#x\n", pVCpu->cpum.GstCtx.rip, pVCpu->iem.s.fPrefixes)); \
            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REX_MASK; \
            pVCpu->iem.s.uRexB = 0; \
            pVCpu->iem.s.uRexIndex = 0; \
            pVCpu->iem.s.uRexReg = 0; \
            iemRecalEffOpSize(pVCpu); \
        } \
    } while (0)
401
/**
 * Done decoding.
 * Placeholder hook invoked when instruction decoding completes; currently a
 * no-op.
 */
#define IEMOP_HLP_DONE_DECODING() \
    do \
    { \
        /*nothing for now, maybe later... */ \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
    } while (0)
422
423
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if in real or v8086 mode.
 */
#define IEMOP_HLP_DONE_VEX_DECODING() \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
438
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
455
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, or if
 * VEX.L is not zero.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0() \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0)) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
471
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not zero, or if the a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
488
489
490/**
491 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
492 * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
493 * register 0, or if in real or v8086 mode.
494 */
495#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV() \
496 do \
497 { \
498 if (RT_LIKELY( !( pVCpu->iem.s.fPrefixes \
499 & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
500 && !pVCpu->iem.s.uVex3rdReg \
501 && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )) \
502 { /* likely */ } \
503 else \
504 return IEMOP_RAISE_INVALID_OPCODE(); \
505 } while (0)
506
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
 * register 0, if in real or v8086 mode, or if the a_fFeature is not present in
 * the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !pVCpu->iem.s.uVex3rdReg \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
525
/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf (i.e. uVex3rdReg != 0, since the
 * field is stored inverted), or if VEX.L!=0.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV() \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
543
/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature
 * is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
563
/** Done decoding (no-lock variant, one disassembler parameter): raises
 * \#UD if a lock prefix is present.  The NOREF()s silence unused-argument
 * warnings for the disassembler metadata on the failure path. */
#define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_fDisOpType); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } while (0)
/** Same as IEMOP_HLP_DECODED_NL_1, but with two disassembler parameters. */
#define IEMOP_HLP_DECODED_NL_2(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_uDisParam1, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_uDisParam1); NOREF(a_fDisOpType); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } while (0)
586
/**
 * Done decoding, raise \#UD exception if any lock, repz or repnz prefixes
 * are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if any operand-size override, repz or repnz
 * prefixes are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
612
/**
 * Check for a CPUMFEATURES member to be true, raise \#UD if clear.
 */
#define IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, a_fFeature) \
    do \
    { \
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
624
/** Calculates the effective address of a ModR/M memory operand; result is
 * returned via @a pGCPtrEff (presumably @a cbImm is the number of immediate
 * bytes following the operand -- confirm against the definition). */
VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff) RT_NOEXCEPT;
/** Like iemOpHlpCalcRmEffAddr, but with an additional RSP displacement @a offRsp. */
VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff, int8_t offRsp) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
/** Longjmp-on-error variant of iemOpHlpCalcRmEffAddr, returning the address directly. */
RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
630
631/** @} */
632
633#endif /* !VMM_INCLUDED_SRC_include_IEMOpHlp_h */
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette