VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstCommonBodyMacros.h

Last change on this file was 104195, checked in by vboxsync, 2 months ago

VMM/IEM: Refactoring assembly helpers to not pass eflags by reference but instead by value and return the updated value (via eax/w0) - first chunk: ADD,ADC,SUB,SBB,CMP,TEST,AND,OR,XOR. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 19.8 KB
RevLine 
[98916]1/* $Id: IEMAllInstCommonBodyMacros.h 104195 2024-04-05 14:45:23Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, Common Body Macros.
4 *
5 * This is placed in its own file without anything else in it, so that it can
[100733]6 * be digested by SimplerParser in IEMAllInstPython.py prior to processing
[98916]7 * any of the other IEMAllInstruction*.cpp.h files. For instance
[100733]8 * IEMAllInstCommon.cpp.h wouldn't do as it defines several invalid
[98916]9 * instructions and such that could confuse the parser result.
10 */
11
12/*
13 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
14 *
15 * This file is part of VirtualBox base platform packages, as
16 * available from https://www.virtualbox.org.
17 *
18 * This program is free software; you can redistribute it and/or
19 * modify it under the terms of the GNU General Public License
20 * as published by the Free Software Foundation, in version 3 of the
21 * License.
22 *
23 * This program is distributed in the hope that it will be useful, but
24 * WITHOUT ANY WARRANTY; without even the implied warranty of
25 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
26 * General Public License for more details.
27 *
28 * You should have received a copy of the GNU General Public License
29 * along with this program; if not, see <https://www.gnu.org/licenses>.
30 *
31 * SPDX-License-Identifier: GPL-3.0-only
32 */
33
34
35/**
36 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with a register
37 * as the destination.
38 *
39 * @note Used both in OneByte and TwoByte0f.
40 */
[103718]41#define IEMOP_BODY_BINARY_rv_rm(a_bRm, a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_f16BitMcFlag, a_EmitterBasename, a_fNativeArchs) \
[98916]42 /* \
43 * If rm is denoting a register, no more instruction bytes. \
44 */ \
[103548]45 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
[98916]46 { \
47 switch (pVCpu->iem.s.enmEffOpSize) \
48 { \
49 case IEMMODE_16BIT: \
[104018]50 IEM_MC_BEGIN(a_f16BitMcFlag, 0); \
[100714]51 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]52 IEM_MC_ARG(uint16_t, u16Src, 2); \
[103548]53 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
[103640]54 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
55 IEM_MC_LOCAL(uint16_t, u16Dst); \
56 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]57 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103640]58 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
59 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst); \
[103828]60 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103640]61 } IEM_MC_NATIVE_ELSE() { \
[104195]62 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
[103640]63 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104195]64 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
65 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
66 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103640]67 } IEM_MC_NATIVE_ENDIF(); \
[98916]68 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
69 IEM_MC_END(); \
70 break; \
71 \
72 case IEMMODE_32BIT: \
[104018]73 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
[100714]74 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]75 IEM_MC_ARG(uint32_t, u32Src, 2); \
[103548]76 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
[103640]77 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
78 IEM_MC_LOCAL(uint32_t, u32Dst); \
79 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]80 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103640]81 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
82 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Dst); \
[103828]83 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103640]84 } IEM_MC_NATIVE_ELSE() { \
[104195]85 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
[103640]86 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104195]87 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
88 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
[103718]89 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[104195]90 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103640]91 } IEM_MC_NATIVE_ENDIF(); \
[98916]92 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
93 IEM_MC_END(); \
94 break; \
95 \
96 case IEMMODE_64BIT: \
[104018]97 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
[100714]98 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]99 IEM_MC_ARG(uint64_t, u64Src, 2); \
[103548]100 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
[103640]101 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
102 IEM_MC_LOCAL(uint64_t, u64Dst); \
103 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]104 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103640]105 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
106 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); \
[103828]107 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103640]108 } IEM_MC_NATIVE_ELSE() { \
[104195]109 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
[103640]110 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104195]111 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
112 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
113 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103640]114 } IEM_MC_NATIVE_ENDIF(); \
[98916]115 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
116 IEM_MC_END(); \
117 break; \
118 \
119 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
120 } \
121 } \
122 else \
123 { \
124 /* \
125 * We're accessing memory. \
126 */ \
127 switch (pVCpu->iem.s.enmEffOpSize) \
128 { \
129 case IEMMODE_16BIT: \
[104018]130 IEM_MC_BEGIN(a_f16BitMcFlag, 0); \
[98916]131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
[103548]132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
[98916]133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]134 IEM_MC_ARG(uint16_t, u16Src, 2); \
[98916]135 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
[103642]136 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
137 IEM_MC_LOCAL(uint16_t, u16Dst); \
138 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]139 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103642]140 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
141 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst); \
[103828]142 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103642]143 } IEM_MC_NATIVE_ELSE() { \
[104195]144 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
[103642]145 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[104195]146 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
147 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
148 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103642]149 } IEM_MC_NATIVE_ENDIF(); \
[98916]150 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
151 IEM_MC_END(); \
152 break; \
153 \
154 case IEMMODE_32BIT: \
[104018]155 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
[98916]156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
[103548]157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
[98916]158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]159 IEM_MC_ARG(uint32_t, u32Src, 2); \
[98916]160 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
[103642]161 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
162 IEM_MC_LOCAL(uint32_t, u32Dst); \
163 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]164 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103642]165 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
166 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Dst); \
[103828]167 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103642]168 } IEM_MC_NATIVE_ELSE() { \
[104195]169 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
[103642]170 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[104195]171 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
172 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
[103718]173 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[104195]174 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103642]175 } IEM_MC_NATIVE_ENDIF(); \
[98916]176 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
177 IEM_MC_END(); \
178 break; \
179 \
180 case IEMMODE_64BIT: \
[104018]181 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
[98916]182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
[103548]183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
[98916]184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
[104195]185 IEM_MC_ARG(uint64_t, u64Src, 2); \
[98916]186 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
[103642]187 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
188 IEM_MC_LOCAL(uint64_t, u64Dst); \
189 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]190 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[103642]191 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
192 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); \
[103828]193 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103642]194 } IEM_MC_NATIVE_ELSE() { \
[104195]195 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
[103642]196 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[104195]197 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
198 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
199 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
[103642]200 } IEM_MC_NATIVE_ENDIF(); \
[98916]201 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
202 IEM_MC_END(); \
203 break; \
204 \
205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
206 } \
207 } \
208 (void)0
209
[104195]210
[103718]211/**
[104195]212 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with a register
213 * as the destination.
[103718]214 *
215 * @note Used both in OneByte and TwoByte0f.
216 */
[104195]217#define IEMOP_BODY_BINARY_TODO_rv_rm(a_bRm, a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_f16BitMcFlag, a_EmitterBasename, a_fNativeArchs) \
[103718]218 /* \
219 * If rm is denoting a register, no more instruction bytes. \
220 */ \
221 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
222 { \
223 switch (pVCpu->iem.s.enmEffOpSize) \
224 { \
225 case IEMMODE_16BIT: \
[104195]226 IEM_MC_BEGIN(a_f16BitMcFlag, 0); \
[103718]227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
228 IEM_MC_ARG(uint16_t, u16Src, 1); \
229 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
230 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
231 IEM_MC_LOCAL(uint16_t, u16Dst); \
232 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]233 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]234 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
235 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst); \
[103828]236 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]237 } IEM_MC_NATIVE_ELSE() { \
238 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
239 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
240 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
241 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]242 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
[103718]243 } IEM_MC_NATIVE_ENDIF(); \
244 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
245 IEM_MC_END(); \
246 break; \
247 \
248 case IEMMODE_32BIT: \
[104018]249 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
[103718]250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
251 IEM_MC_ARG(uint32_t, u32Src, 1); \
252 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
253 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
254 IEM_MC_LOCAL(uint32_t, u32Dst); \
255 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]256 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]257 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
258 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Dst); \
[103828]259 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]260 } IEM_MC_NATIVE_ELSE() { \
261 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
262 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
263 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
264 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]265 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
266 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[103718]267 } IEM_MC_NATIVE_ENDIF(); \
268 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
269 IEM_MC_END(); \
270 break; \
271 \
272 case IEMMODE_64BIT: \
[104018]273 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
[103718]274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
275 IEM_MC_ARG(uint64_t, u64Src, 1); \
276 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
277 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
278 IEM_MC_LOCAL(uint64_t, u64Dst); \
279 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]280 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]281 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
282 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); \
[103828]283 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]284 } IEM_MC_NATIVE_ELSE() { \
285 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
286 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
287 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
288 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]289 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
[103718]290 } IEM_MC_NATIVE_ENDIF(); \
291 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
292 IEM_MC_END(); \
293 break; \
294 \
295 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
296 } \
297 } \
298 else \
299 { \
300 /* \
301 * We're accessing memory. \
302 */ \
303 switch (pVCpu->iem.s.enmEffOpSize) \
304 { \
305 case IEMMODE_16BIT: \
[104195]306 IEM_MC_BEGIN(a_f16BitMcFlag, 0); \
[103718]307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
310 IEM_MC_ARG(uint16_t, u16Src, 1); \
311 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
312 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
313 IEM_MC_LOCAL(uint16_t, u16Dst); \
314 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]315 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]316 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
317 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst); \
[103828]318 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]319 } IEM_MC_NATIVE_ELSE() { \
320 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
321 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
322 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
323 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]324 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
[103718]325 } IEM_MC_NATIVE_ENDIF(); \
326 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
327 IEM_MC_END(); \
328 break; \
329 \
330 case IEMMODE_32BIT: \
[104018]331 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
[103718]332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
335 IEM_MC_ARG(uint32_t, u32Src, 1); \
336 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
337 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
338 IEM_MC_LOCAL(uint32_t, u32Dst); \
339 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]340 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]341 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
342 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Dst); \
[103828]343 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]344 } IEM_MC_NATIVE_ELSE() { \
345 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
346 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
347 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
348 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
350 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
[103718]351 } IEM_MC_NATIVE_ENDIF(); \
352 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
353 IEM_MC_END(); \
354 break; \
355 \
356 case IEMMODE_64BIT: \
[104018]357 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
[103718]358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
361 IEM_MC_ARG(uint64_t, u64Src, 1); \
362 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
363 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
364 IEM_MC_LOCAL(uint64_t, u64Dst); \
365 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
[104016]366 IEM_MC_LOCAL_EFLAGS(uEFlags); \
[104195]367 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
368 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); \
[103828]369 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
[103718]370 } IEM_MC_NATIVE_ELSE() { \
371 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
372 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
373 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
374 IEM_MC_REF_EFLAGS(pEFlags); \
[104195]375 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
[103718]376 } IEM_MC_NATIVE_ENDIF(); \
377 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
378 IEM_MC_END(); \
379 break; \
380 \
381 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
382 } \
383 } \
384 (void)0
385
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
ContactPrivacy policyTerms of Use