VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 104206

Last change on this file was in revision 104206, checked in by vboxsync, 13 months ago

VMM/IEM: Refactoring assembly helpers to not pass eflags by reference but instead by value and return the updated value (via eax/w0) - first chunk: IMUL(two ops), BSF, BSR, LZCNT, TZCNT, POPCNT. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 104206 2024-04-05 20:28:19Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register.
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on the
 * same register.
 *
 * The condition checks that the ModR/M byte encodes register-direct mode
 * (mod=3) with reg and r/m naming the same register (bRm >> 3 yields mod:reg,
 * which equals rm | (3 << 3) only when mod == 3 and reg == rm), and that the
 * REX.R and REX.B extension bits agree, so both operands really are the same
 * register.  The result is then always zero, so we just store a zero constant
 * and set the status flags the way a zero result does: PF and ZF set, the
 * remaining status bits (CF/OF/SF/AF) cleared.
 *
 * @param   a_bRm   The ModR/M byte.
 *
 * @note    The original used the caller's local 'bRm' instead of the macro
 *          parameter in the comparison; fixed to consistently use a_bRm.
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        IEM_MC_LOCAL_EFLAGS(fEFlags); \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
80
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Handles three decode outcomes: register destination (no LOCK allowed),
 * plain memory destination (R/W mapping), and LOCK-prefixed memory
 * destination (atomic mapping, calls the _locked assembly helper).
 *
 * @param   a_bRm                   The ModR/M byte.
 * @param   a_InsNm                 The instruction name (lower case), used to
 *                                  form the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fRegRegNativeArchs    Architectures with a native reg,reg emitter.
 * @param   a_fMemRegNativeArchs    Architectures with a native mem,reg emitter.
 *                                  NOTE(review): not referenced by this body;
 *                                  the memory forms always call the assembly
 *                                  helper.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS_OPT(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
159
/**
 * Body for instructions like TEST & CMP with a byte memory/registers as
 * operands.
 *
 * These only read the destination operand (read-only mapping, no write-back),
 * so a LOCK prefix on the memory form raises an invalid-lock-prefix exception.
 * The native path for the memory form is currently disabled
 * (IEM_MC_NATIVE_IF(0)).
 *
 * @param   a_bRm               The ModR/M byte.
 * @param   a_fnNormalU8        The 8-bit assembly implementation function.
 * @param   a_EmitterBasename   Base name used to form the iemNativeEmit_ name.
 * @param   a_fNativeArchs      Architectures with a native reg,reg emitter.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 2); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
231
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The destination is always a register, so no LOCK prefix is accepted in
 * either form (both use IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         The instruction name (lower case), used to form
 *                          the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fNativeArchs  Architectures with a native emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
292
/**
 * Body for byte instruction CMP with a register as the destination.
 *
 * Unlike IEMOP_BODY_BINARY_r8_rm, the native path commits only EFLAGS and
 * does not store a result back to the destination register (CMP does not
 * modify its destination).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         The instruction name (lower case), used to form
 *                          the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fNativeArchs  Architectures with a native emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
350
351
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note!   This macro deliberately ends in the middle of the locked-memory
 *         'else' branch (open braces, terminated with a bare (void)0); it
 *         must be immediately followed by IEMOP_BODY_BINARY_rm_rv_LOCKED,
 *         which supplies the locked variants and the closing braces.
 *
 * @param   a_bRm                   The ModR/M byte.
 * @param   a_InsNm                 The instruction name (lower case), used to
 *                                  form the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fRegRegNativeArchs    Architectures with a native reg,reg emitter.
 * @param   a_fMemRegNativeArchs    Architectures with a native mem,reg emitter.
 *                                  NOTE(review): not referenced by this body.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/**
 * Body for the LOCK-prefixed memory forms of word/dword/qword instructions
 * like ADD, AND, OR, ++; completes the open 'else' branch left by
 * IEMOP_BODY_BINARY_rm_rv_RW and supplies its closing braces.
 *
 * Kept as a separate macro to work around a parsing issue in
 * IEMAllInstPython.py.
 *
 * @param   a_bRm    The ModR/M byte.
 * @param   a_InsNm  The instruction name (lower case), used to form the
 *                   iemAImpl_..._locked helper names.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
581
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * The destination is only read (read-only mapping, no write-back), so a LOCK
 * prefix on the memory form raises an invalid-lock-prefix exception.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         The instruction name (lower case), used to form
 *                          the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fNativeArchs  Architectures with a native emitter (used for both
 *                          the register and memory forms here).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 2); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 2); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 2); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
766
767
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_InsNm         The instruction name (lower case), used to form
 *                          the iemAImpl_ and iemNativeEmit_ names.
 * @param   a_fNativeArchs  Architectures with a native emitter.
 *
 * @note    Ends with IEM_MC_END() without a trailing semicolon; the caller
 *          terminates the statement.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
    IEM_MC_BEGIN(0, 0); \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
        IEM_MC_LOCAL(uint8_t, u8Dst); \
        IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
        IEM_MC_LOCAL(uint32_t, uEFlags); \
        IEM_MC_FETCH_EFLAGS(uEFlags); \
        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
    } IEM_MC_NATIVE_ELSE() { \
        IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 2); \
        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
    } IEM_MC_NATIVE_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
794
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Switches on the effective operand size; the 64-bit case uses a
 * sign-extended 32-bit immediate (IEM_OPCODE_GET_NEXT_S32_SX_U64).  Each
 * size either emits native code (hosts in a_fNativeArchs) or calls the
 * iemAImpl_<a_InsNm>_uNN helper, which takes EFLAGS by value and returns
 * the updated EFLAGS.  Note: the cases rely on IEM_MC_END() terminating
 * the block, hence no explicit break statements.
 *
 * @param   a_InsNm         The instruction name, used to form the
 *                          iemNativeEmit_* / iemAImpl_* helper names.
 * @param   a_fNativeArchs  Mask of RT_ARCH_VAL_XXX values with a native
 *                          emitter.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); /* 32-bit GPR write zeroes bits 63:32 */ \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
881
/**
 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
 * word/dword immediate.
 *
 * Read-only variant of IEMOP_BODY_BINARY_rAX_Iz_RW: the destination
 * register is only referenced const (IEM_MC_REF_GREG_UNN_CONST) and no
 * result is stored back - only the EFLAGS returned by the
 * iemAImpl_<a_InsNm>_uNN helper are committed.  The 64-bit case uses a
 * sign-extended 32-bit immediate.  The cases rely on IEM_MC_END()
 * terminating the block, hence no explicit break statements.
 *
 * @param   a_InsNm         The instruction name (cmp or test), used to form
 *                          the iemNativeEmit_* / iemAImpl_* helper names.
 * @param   a_fNativeArchs  Mask of RT_ARCH_VAL_XXX values with a native
 *                          emitter.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
                IEM_MC_ARG(uint16_t const *,pu16Dst, 1); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
                IEM_MC_ARG(uint32_t const *,pu32Dst, 1); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
                IEM_MC_ARG(uint64_t const *,pu64Dst, 1); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
964
965
966
967/* Instruction specification format - work in progress: */
968
969/**
970 * @opcode 0x00
971 * @opmnemonic add
972 * @op1 rm:Eb
973 * @op2 reg:Gb
974 * @opmaps one
975 * @openc ModR/M
976 * @opflclass arithmetic
977 * @ophints harmless ignores_op_sizes
978 * @opstats add_Eb_Gb
979 * @opgroup og_gen_arith_bin
980 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
981 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
982 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
983 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
984 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: byte add with reg-or-mem destination; LOCK allowed (memory form). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
991
992
993/**
994 * @opcode 0x01
995 * @opgroup og_gen_arith_bin
996 * @opflclass arithmetic
997 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
998 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
999 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1000 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1001 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: word/dword/qword add; the second body macro emits the LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
}
1009
1010
1011/**
1012 * @opcode 0x02
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic
1015 * @opcopytests iemOp_add_Eb_Gb
1016 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: byte add with register destination (no LOCK form). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1023
1024
1025/**
1026 * @opcode 0x03
1027 * @opgroup og_gen_arith_bin
1028 * @opflclass arithmetic
1029 * @opcopytests iemOp_add_Ev_Gv
1030 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: word/dword/qword add with register destination. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1037
1038
1039/**
1040 * @opcode 0x04
1041 * @opgroup og_gen_arith_bin
1042 * @opflclass arithmetic
1043 * @opcopytests iemOp_add_Eb_Gb
1044 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: fixed byte form, operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1050
1051
1052/**
1053 * @opcode 0x05
1054 * @opgroup og_gen_arith_bin
1055 * @opflclass arithmetic
1056 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
1057 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1058 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1059 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1060 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: immediate is word/dword (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1066
1067
1068/**
1069 * @opcode 0x06
1070 * @opgroup og_stack_sreg
1071 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
1078
1079
1080/**
1081 * @opcode 0x07
1082 * @opgroup og_stack_sreg
1083 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode.  Deferred to the C implementation since
       loading a segment register can change the addressing mode (IEM_CIMPL_F_MODE);
       the mask lists the registers clobbered: SP plus the ES selector/base/limit/attribs. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
1097
1098
1099/**
1100 * @opcode 0x08
1101 * @opgroup og_gen_arith_bin
1102 * @opflclass logical
1103 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1104 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1105 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1106 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1107 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: byte OR with reg-or-mem destination; AF is declared undefined for verification. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1115
1116
/**
1118 * @opcode 0x09
1119 * @opgroup og_gen_arith_bin
1120 * @opflclass logical
1121 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1122 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1123 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1124 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1125 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1126 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clear it.
1128 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: word/dword/qword OR; second body macro emits the LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
}
1137
1138
1139/**
1140 * @opcode 0x0a
1141 * @opgroup og_gen_arith_bin
1142 * @opflclass logical
1143 * @opcopytests iemOp_or_Eb_Gb
1144 */
1145FNIEMOP_DEF(iemOp_or_Gb_Eb)
1146{
1147 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1148 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1150 IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1151}
1152
1153
1154/**
1155 * @opcode 0x0b
1156 * @opgroup og_gen_arith_bin
1157 * @opflclass logical
1158 * @opcopytests iemOp_or_Ev_Gv
1159 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: word/dword/qword OR with register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1167
1168
1169/**
1170 * @opcode 0x0c
1171 * @opgroup og_gen_arith_bin
1172 * @opflclass logical
1173 * @opcopytests iemOp_or_Eb_Gb
1174 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: fixed byte form, operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1181
1182
1183/**
1184 * @opcode 0x0d
1185 * @opgroup og_gen_arith_bin
1186 * @opflclass logical
1187 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1188 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1189 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1190 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1191 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1192 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1193 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1194 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: immediate is word/dword (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1201
1202
1203/**
1204 * @opcode 0x0e
1205 * @opgroup og_stack_sreg
1206 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1213
1214
1215/**
1216 * @opcode 0x0f
1217 * @opmnemonic EscTwo0f
1218 * @openc two0f
1219 * @opdisenum OP_2B_ESC
1220 * @ophints harmless
1221 * @opgroup og_escapes
1222 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte table has four entries per opcode, selected by
           idxPrefix (presumably the 66/F3/F2 prefix state - see table asserts above). */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1260
1261/**
1262 * @opcode 0x10
1263 * @opgroup og_gen_arith_bin
1264 * @opflclass arithmetic_carry
1265 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1266 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1267 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1268 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1269 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1270 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: byte add-with-carry, reg-or-mem destination; LOCK allowed (memory form). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1277
1278
1279/**
1280 * @opcode 0x11
1281 * @opgroup og_gen_arith_bin
1282 * @opflclass arithmetic_carry
1283 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1284 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1285 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1286 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1287 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1288 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: word/dword/qword add-with-carry; second body macro emits the LOCK variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
}
1296
1297
1298/**
1299 * @opcode 0x12
1300 * @opgroup og_gen_arith_bin
1301 * @opflclass arithmetic_carry
1302 * @opcopytests iemOp_adc_Eb_Gb
1303 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: byte add-with-carry, register destination (no LOCK form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1310
1311
1312/**
1313 * @opcode 0x13
1314 * @opgroup og_gen_arith_bin
1315 * @opflclass arithmetic_carry
1316 * @opcopytests iemOp_adc_Ev_Gv
1317 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: word/dword/qword add-with-carry, register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1324
1325
1326/**
1327 * @opcode 0x14
1328 * @opgroup og_gen_arith_bin
1329 * @opflclass arithmetic_carry
1330 * @opcopytests iemOp_adc_Eb_Gb
1331 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed byte form, operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1337
1338
1339/**
1340 * @opcode 0x15
1341 * @opgroup og_gen_arith_bin
1342 * @opflclass arithmetic_carry
1343 * @opcopytests iemOp_adc_Ev_Gv
1344 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: immediate is word/dword (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1350
1351
1352/**
1353 * @opcode 0x16
1354 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1361
1362
1363/**
1364 * @opcode 0x17
1365 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode.  Deferred to the C implementation;
       IEM_CIMPL_F_INHIBIT_SHADOW models the one-instruction interrupt
       inhibition after loading SS. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1379
1380
1381/**
1382 * @opcode 0x18
1383 * @opgroup og_gen_arith_bin
1384 * @opflclass arithmetic_carry
1385 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: byte subtract-with-borrow, reg-or-mem destination; LOCK allowed (memory form). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1392
1393
1394/**
1395 * @opcode 0x19
1396 * @opgroup og_gen_arith_bin
1397 * @opflclass arithmetic_carry
1398 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: word/dword/qword subtract-with-borrow; second body macro emits the LOCK variant. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
}
1406
1407
1408/**
1409 * @opcode 0x1a
1410 * @opgroup og_gen_arith_bin
1411 * @opflclass arithmetic_carry
1412 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: byte subtract-with-borrow, register destination (no LOCK form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1419
1420
1421/**
1422 * @opcode 0x1b
1423 * @opgroup og_gen_arith_bin
1424 * @opflclass arithmetic_carry
1425 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: word/dword/qword subtract-with-borrow, register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1432
1433
1434/**
1435 * @opcode 0x1c
1436 * @opgroup og_gen_arith_bin
1437 * @opflclass arithmetic_carry
1438 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed byte form, operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1444
1445
1446/**
1447 * @opcode 0x1d
1448 * @opgroup og_gen_arith_bin
1449 * @opflclass arithmetic_carry
1450 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: immediate is word/dword (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1456
1457
1458/**
1459 * @opcode 0x1e
1460 * @opgroup og_stack_sreg
1461 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1468
1469
1470/**
1471 * @opcode 0x1f
1472 * @opgroup og_stack_sreg
1473 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode.  Deferred to the C implementation
       (IEM_CIMPL_F_MODE: segment loads can change the addressing mode);
       clobber mask covers SP and the DS selector/base/limit/attribs. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1487
1488
1489/**
1490 * @opcode 0x20
1491 * @opgroup og_gen_arith_bin
1492 * @opflclass logical
1493 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: byte AND with reg-or-mem destination; AF is declared undefined for verification. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1501
1502
1503/**
1504 * @opcode 0x21
1505 * @opgroup og_gen_arith_bin
1506 * @opflclass logical
1507 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: word/dword/qword AND; second body macro emits the LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
}
1516
1517
1518/**
1519 * @opcode 0x22
1520 * @opgroup og_gen_arith_bin
1521 * @opflclass logical
1522 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: byte AND with register destination (no LOCK form). */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1530
1531
1532/**
1533 * @opcode 0x23
1534 * @opgroup og_gen_arith_bin
1535 * @opflclass logical
1536 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: word/dword/qword AND with register destination. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1544
1545
1546/**
1547 * @opcode 0x24
1548 * @opgroup og_gen_arith_bin
1549 * @opflclass logical
1550 */
1551FNIEMOP_DEF(iemOp_and_Al_Ib)
1552{
1553 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1555 IEMOP_BODY_BINARY_AL_Ib(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1556}
1557
1558
1559/**
1560 * @opcode 0x25
1561 * @opgroup og_gen_arith_bin
1562 * @opflclass logical
1563 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: immediate is word/dword (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1570
1571
1572/**
1573 * @opcode 0x26
1574 * @opmnemonic SEG
1575 * @op1 ES
1576 * @opgroup og_prefix
1577 * @openc prefix
1578 * @opdisenum OP_SEG
1579 * @ophints harmless
1580 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and recurse into the decoder for
       the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1590
1591
1592/**
1593 * @opcode 0x27
1594 * @opfltest af,cf
1595 * @opflmodify cf,pf,af,zf,sf,of
1596 * @opflundef of
1597 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation; only AL (xAX) and the status flags change. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1606
1607
/**
 * Special case body for word/dword/qword instruction like SUB and XOR that can
 * be used to zero a register.
 *
 * This can be used both for the rv_rm and rm_rv forms since it's working on the
 * same register.
 *
 * The special case is taken when the ModR/M byte has mod=3 (register form) and
 * reg == rm - the LHS of the comparison is (mod << 3) | reg, the RHS is
 * (X86_MOD_REG << 3) | rm - and the REX extension bits agree
 * (uRexReg == uRexB), i.e. both operands are the very same register.  The
 * register is then stored as zero directly and EFLAGS are set to the fixed
 * result of x OP x == 0: all status bits cleared except PF and ZF.
 * The cases rely on IEM_MC_END() terminating the block before the break.
 */
#define IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(a_bRm) \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
                IEM_MC_LOCAL_EFLAGS(fEFlags); \
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
                IEM_MC_COMMIT_EFLAGS(fEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
                IEM_MC_LOCAL_EFLAGS(fEFlags); \
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
                IEM_MC_COMMIT_EFLAGS(fEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
                IEM_MC_LOCAL_EFLAGS(fEFlags); \
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
                IEM_MC_COMMIT_EFLAGS(fEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } ((void)0)
1660
1661
1662/**
1663 * @opcode 0x28
1664 * @opgroup og_gen_arith_bin
1665 * @opflclass arithmetic
1666 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: byte subtract, reg-or-mem destination; LOCK allowed (memory form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1674
1675
1676/**
1677 * @opcode 0x29
1678 * @opgroup og_gen_arith_bin
1679 * @opflclass arithmetic
1680 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: word/dword/qword subtract; second body macro emits the LOCK variant. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
}
1689
1690
1691/**
1692 * @opcode 0x2a
1693 * @opgroup og_gen_arith_bin
1694 * @opflclass arithmetic
1695 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: byte subtract, register destination (no LOCK form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1703
1704
1705/**
1706 * @opcode 0x2b
1707 * @opgroup og_gen_arith_bin
1708 * @opflclass arithmetic
1709 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: word/dword/qword subtract, register destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1717
1718
1719/**
1720 * @opcode 0x2c
1721 * @opgroup og_gen_arith_bin
1722 * @opflclass arithmetic
1723 */
1724FNIEMOP_DEF(iemOp_sub_Al_Ib)
1725{
1726 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1727 IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1728}
1729
1730
1731/**
1732 * @opcode 0x2d
1733 * @opgroup og_gen_arith_bin
1734 * @opflclass arithmetic
1735 */
1736FNIEMOP_DEF(iemOp_sub_eAX_Iz)
1737{
1738 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1739 IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1740}
1741
1742
1743/**
1744 * @opcode 0x2e
1745 * @opmnemonic SEG
1746 * @op1 CS
1747 * @opgroup og_prefix
1748 * @openc prefix
1749 * @opdisenum OP_SEG
1750 * @ophints harmless
1751 */
1752FNIEMOP_DEF(iemOp_seg_CS)
1753{
1754 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1755 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1756 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1757
1758 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1759 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1760}
1761
1762
1763/**
1764 * @opcode 0x2f
1765 * @opfltest af,cf
1766 * @opflmodify cf,pf,af,zf,sf,of
1767 * @opflundef of
1768 */
1769FNIEMOP_DEF(iemOp_das)
1770{
1771 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1772 IEMOP_HLP_NO_64BIT();
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1775 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
1776}
1777
1778
1779/**
1780 * @opcode 0x30
1781 * @opgroup og_gen_arith_bin
1782 * @opflclass logical
1783 */
1784FNIEMOP_DEF(iemOp_xor_Eb_Gb)
1785{
1786 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1790 IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1791}
1792
1793
1794/**
1795 * @opcode 0x31
1796 * @opgroup og_gen_arith_bin
1797 * @opflclass logical
1798 */
1799FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1800{
1801 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1802 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1804 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1805 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1806 IEMOP_BODY_BINARY_rm_rv_LOCKED( bRm, xor);
1807}
1808
1809
1810/**
1811 * @opcode 0x32
1812 * @opgroup og_gen_arith_bin
1813 * @opflclass logical
1814 */
1815FNIEMOP_DEF(iemOp_xor_Gb_Eb)
1816{
1817 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1818 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1821 IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1822}
1823
1824
1825/**
1826 * @opcode 0x33
1827 * @opgroup og_gen_arith_bin
1828 * @opflclass logical
1829 */
1830FNIEMOP_DEF(iemOp_xor_Gv_Ev)
1831{
1832 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1833 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1835 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1836 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1837}
1838
1839
1840/**
1841 * @opcode 0x34
1842 * @opgroup og_gen_arith_bin
1843 * @opflclass logical
1844 */
1845FNIEMOP_DEF(iemOp_xor_Al_Ib)
1846{
1847 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1849 IEMOP_BODY_BINARY_AL_Ib(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1850}
1851
1852
1853/**
1854 * @opcode 0x35
1855 * @opgroup og_gen_arith_bin
1856 * @opflclass logical
1857 */
1858FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1859{
1860 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1862 IEMOP_BODY_BINARY_rAX_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1863}
1864
1865
1866/**
1867 * @opcode 0x36
1868 * @opmnemonic SEG
1869 * @op1 SS
1870 * @opgroup og_prefix
1871 * @openc prefix
1872 * @opdisenum OP_SEG
1873 * @ophints harmless
1874 */
1875FNIEMOP_DEF(iemOp_seg_SS)
1876{
1877 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1878 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1879 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1880
1881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1883}
1884
1885
1886/**
1887 * @opcode 0x37
1888 * @opfltest af
1889 * @opflmodify cf,pf,af,zf,sf,of
1890 * @opflundef pf,zf,sf,of
1891 * @opgroup og_gen_arith_dec
1892 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1893 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1894 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1895 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1896 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1897 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1898 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1899 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1900 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1901 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1902 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1903 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1904 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1905 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1906 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1907 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1908 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1909 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1910 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1911 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1912 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1913 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1914 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1915 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1916 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1917 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1918 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1919 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1920 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1921 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1922 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1923 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode (see DISOPTYPE_X86_INVALID_64) */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is architecturally undefined after AAA */

    /* Defer to the C implementation; touches status flags and the rAX register. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1933
1934
1935/**
1936 * Body for word/dword/qword the instruction CMP, ++ with a register as the
1937 * destination.
1938 *
1939 * @note Used both in OneByte and TwoByte0f.
1940 */
1941#define IEMOP_BODY_BINARY_rv_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
1942 /* \
1943 * If rm is denoting a register, no more instruction bytes. \
1944 */ \
1945 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
1946 { \
1947 switch (pVCpu->iem.s.enmEffOpSize) \
1948 { \
1949 case IEMMODE_16BIT: \
1950 IEM_MC_BEGIN(0, 0); \
1951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1952 IEM_MC_ARG(uint16_t, u16Src, 2); \
1953 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1954 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1955 IEM_MC_LOCAL(uint16_t, u16Dst); \
1956 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1957 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1958 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
1959 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1960 } IEM_MC_NATIVE_ELSE() { \
1961 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
1962 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1963 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1964 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
1965 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1966 } IEM_MC_NATIVE_ENDIF(); \
1967 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1968 IEM_MC_END(); \
1969 break; \
1970 \
1971 case IEMMODE_32BIT: \
1972 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1974 IEM_MC_ARG(uint32_t, u32Src, 2); \
1975 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1976 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1977 IEM_MC_LOCAL(uint32_t, u32Dst); \
1978 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1979 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1980 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
1981 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1982 } IEM_MC_NATIVE_ELSE() { \
1983 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
1984 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1985 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1986 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
1987 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1988 } IEM_MC_NATIVE_ENDIF(); \
1989 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1990 IEM_MC_END(); \
1991 break; \
1992 \
1993 case IEMMODE_64BIT: \
1994 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1996 IEM_MC_ARG(uint64_t, u64Src, 2); \
1997 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1998 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1999 IEM_MC_LOCAL(uint64_t, u64Dst); \
2000 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2001 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2002 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2003 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2004 } IEM_MC_NATIVE_ELSE() { \
2005 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2006 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2007 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2008 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2009 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2010 } IEM_MC_NATIVE_ENDIF(); \
2011 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2012 IEM_MC_END(); \
2013 break; \
2014 \
2015 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2016 } \
2017 } \
2018 else \
2019 { \
2020 /* \
2021 * We're accessing memory. \
2022 */ \
2023 switch (pVCpu->iem.s.enmEffOpSize) \
2024 { \
2025 case IEMMODE_16BIT: \
2026 IEM_MC_BEGIN(0, 0); \
2027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2030 IEM_MC_ARG(uint16_t, u16Src, 2); \
2031 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2032 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2033 IEM_MC_LOCAL(uint16_t, u16Dst); \
2034 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2035 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2036 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
2037 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2038 } IEM_MC_NATIVE_ELSE() { \
2039 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
2040 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2041 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2042 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
2043 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2044 } IEM_MC_NATIVE_ENDIF(); \
2045 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2046 IEM_MC_END(); \
2047 break; \
2048 \
2049 case IEMMODE_32BIT: \
2050 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
2051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2054 IEM_MC_ARG(uint32_t, u32Src, 2); \
2055 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2056 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2057 IEM_MC_LOCAL(uint32_t, u32Dst); \
2058 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2059 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2060 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
2061 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2062 } IEM_MC_NATIVE_ELSE() { \
2063 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
2064 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2065 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2066 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
2067 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2068 } IEM_MC_NATIVE_ENDIF(); \
2069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2070 IEM_MC_END(); \
2071 break; \
2072 \
2073 case IEMMODE_64BIT: \
2074 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2078 IEM_MC_ARG(uint64_t, u64Src, 2); \
2079 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2080 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2081 IEM_MC_LOCAL(uint64_t, u64Dst); \
2082 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2083 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2084 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2085 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2086 } IEM_MC_NATIVE_ELSE() { \
2087 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2088 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2089 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2090 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2091 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2092 } IEM_MC_NATIVE_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2094 IEM_MC_END(); \
2095 break; \
2096 \
2097 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2098 } \
2099 } \
2100 (void)0
2101
2102
2103/**
2104 * @opcode 0x38
2105 * @opflclass arithmetic
2106 */
2107FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
2108{
2109 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
2110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2111 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2112}
2113
2114
2115/**
2116 * @opcode 0x39
2117 * @opflclass arithmetic
2118 */
2119FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
2120{
2121 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
2122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2123 IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2124}
2125
2126
2127/**
2128 * @opcode 0x3a
2129 * @opflclass arithmetic
2130 */
2131FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
2132{
2133 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
2134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2135 IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2136}
2137
2138
2139/**
2140 * @opcode 0x3b
2141 * @opflclass arithmetic
2142 */
2143FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
2144{
2145 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
2146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2147 IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2148}
2149
2150
2151/**
2152 * @opcode 0x3c
2153 * @opflclass arithmetic
2154 */
2155FNIEMOP_DEF(iemOp_cmp_Al_Ib)
2156{
2157 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
2158 IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2159}
2160
2161
2162/**
2163 * @opcode 0x3d
2164 * @opflclass arithmetic
2165 */
2166FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
2167{
2168 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
2169 IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2170}
2171
2172
2173/**
2174 * @opcode 0x3e
2175 */
2176FNIEMOP_DEF(iemOp_seg_DS)
2177{
2178 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
2179 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
2180 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
2181
2182 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2183 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2184}
2185
2186
2187/**
2188 * @opcode 0x3f
2189 * @opfltest af
2190 * @opflmodify cf,pf,af,zf,sf,of
2191 * @opflundef pf,zf,sf,of
2192 * @opgroup og_gen_arith_dec
2193 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
2194 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
2195 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
2196 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
2197 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
2198 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
2199 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
2200 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
2201 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
2202 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2203 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2204 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
2205 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
2206 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
2207 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
2208 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2209 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2210 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2211 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2212 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
2213 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2214 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2215 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2216 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2217 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2218 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2219 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2220 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2221 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2222 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2223 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2224 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2225 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2226 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2227 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2228 */
2229FNIEMOP_DEF(iemOp_aas)
2230{
2231 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2232 IEMOP_HLP_NO_64BIT();
2233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
2235
2236 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2237}
2238
2239
2240/**
2241 * Common 'inc/dec register' helper.
2242 *
2243 * Not for 64-bit code, only for what became the rex prefixes.
2244 */
2245#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
2246 switch (pVCpu->iem.s.enmEffOpSize) \
2247 { \
2248 case IEMMODE_16BIT: \
2249 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0); \
2250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2251 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
2252 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2253 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
2254 IEM_MC_REF_EFLAGS(pEFlags); \
2255 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
2256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2257 IEM_MC_END(); \
2258 break; \
2259 \
2260 case IEMMODE_32BIT: \
2261 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2263 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
2264 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2265 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
2266 IEM_MC_REF_EFLAGS(pEFlags); \
2267 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
2268 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
2269 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2270 IEM_MC_END(); \
2271 break; \
2272 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2273 } \
2274 (void)0
2275
2276/**
2277 * @opcode 0x40
2278 * @opflclass incdec
2279 */
2280FNIEMOP_DEF(iemOp_inc_eAX)
2281{
2282 /*
2283 * This is a REX prefix in 64-bit mode.
2284 */
2285 if (IEM_IS_64BIT_CODE(pVCpu))
2286 {
2287 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
2288 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
2289
2290 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2291 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2292 }
2293
2294 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
2295 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
2296}
2297
2298
2299/**
2300 * @opcode 0x41
2301 * @opflclass incdec
2302 */
2303FNIEMOP_DEF(iemOp_inc_eCX)
2304{
2305 /*
2306 * This is a REX prefix in 64-bit mode.
2307 */
2308 if (IEM_IS_64BIT_CODE(pVCpu))
2309 {
2310 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
2311 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
2312 pVCpu->iem.s.uRexB = 1 << 3;
2313
2314 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2315 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2316 }
2317
2318 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
2319 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
2320}
2321
2322
2323/**
2324 * @opcode 0x42
2325 * @opflclass incdec
2326 */
2327FNIEMOP_DEF(iemOp_inc_eDX)
2328{
2329 /*
2330 * This is a REX prefix in 64-bit mode.
2331 */
2332 if (IEM_IS_64BIT_CODE(pVCpu))
2333 {
2334 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
2335 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
2336 pVCpu->iem.s.uRexIndex = 1 << 3;
2337
2338 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2339 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2340 }
2341
2342 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
2343 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
2344}
2345
2346
2347
2348/**
2349 * @opcode 0x43
2350 * @opflclass incdec
2351 */
2352FNIEMOP_DEF(iemOp_inc_eBX)
2353{
2354 /*
2355 * This is a REX prefix in 64-bit mode.
2356 */
2357 if (IEM_IS_64BIT_CODE(pVCpu))
2358 {
2359 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
2360 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2361 pVCpu->iem.s.uRexB = 1 << 3;
2362 pVCpu->iem.s.uRexIndex = 1 << 3;
2363
2364 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2365 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2366 }
2367
2368 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
2369 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
2370}
2371
2372
2373/**
2374 * @opcode 0x44
2375 * @opflclass incdec
2376 */
2377FNIEMOP_DEF(iemOp_inc_eSP)
2378{
2379 /*
2380 * This is a REX prefix in 64-bit mode.
2381 */
2382 if (IEM_IS_64BIT_CODE(pVCpu))
2383 {
2384 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
2385 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
2386 pVCpu->iem.s.uRexReg = 1 << 3;
2387
2388 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2389 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2390 }
2391
2392 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
2393 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
2394}
2395
2396
2397/**
2398 * @opcode 0x45
2399 * @opflclass incdec
2400 */
2401FNIEMOP_DEF(iemOp_inc_eBP)
2402{
2403 /*
2404 * This is a REX prefix in 64-bit mode.
2405 */
2406 if (IEM_IS_64BIT_CODE(pVCpu))
2407 {
2408 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
2409 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
2410 pVCpu->iem.s.uRexReg = 1 << 3;
2411 pVCpu->iem.s.uRexB = 1 << 3;
2412
2413 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2414 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2415 }
2416
2417 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
2418 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
2419}
2420
2421
2422/**
2423 * @opcode 0x46
2424 * @opflclass incdec
2425 */
2426FNIEMOP_DEF(iemOp_inc_eSI)
2427{
2428 /*
2429 * This is a REX prefix in 64-bit mode.
2430 */
2431 if (IEM_IS_64BIT_CODE(pVCpu))
2432 {
2433 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
2434 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
2435 pVCpu->iem.s.uRexReg = 1 << 3;
2436 pVCpu->iem.s.uRexIndex = 1 << 3;
2437
2438 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2439 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2440 }
2441
2442 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
2443 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
2444}
2445
2446
2447/**
2448 * @opcode 0x47
2449 * @opflclass incdec
2450 */
2451FNIEMOP_DEF(iemOp_inc_eDI)
2452{
2453 /*
2454 * This is a REX prefix in 64-bit mode.
2455 */
2456 if (IEM_IS_64BIT_CODE(pVCpu))
2457 {
2458 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
2459 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2460 pVCpu->iem.s.uRexReg = 1 << 3;
2461 pVCpu->iem.s.uRexB = 1 << 3;
2462 pVCpu->iem.s.uRexIndex = 1 << 3;
2463
2464 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2465 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2466 }
2467
2468 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
2469 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2470}
2471
2472
2473/**
2474 * @opcode 0x48
2475 * @opflclass incdec
2476 */
2477FNIEMOP_DEF(iemOp_dec_eAX)
2478{
2479 /*
2480 * This is a REX prefix in 64-bit mode.
2481 */
2482 if (IEM_IS_64BIT_CODE(pVCpu))
2483 {
2484 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2485 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2486 iemRecalEffOpSize(pVCpu);
2487
2488 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2489 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2490 }
2491
2492 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2493 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2494}
2495
2496
2497/**
2498 * @opcode 0x49
2499 * @opflclass incdec
2500 */
2501FNIEMOP_DEF(iemOp_dec_eCX)
2502{
2503 /*
2504 * This is a REX prefix in 64-bit mode.
2505 */
2506 if (IEM_IS_64BIT_CODE(pVCpu))
2507 {
2508 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2509 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2510 pVCpu->iem.s.uRexB = 1 << 3;
2511 iemRecalEffOpSize(pVCpu);
2512
2513 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2514 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2515 }
2516
2517 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2518 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2519}
2520
2521
2522/**
2523 * @opcode 0x4a
2524 * @opflclass incdec
2525 */
2526FNIEMOP_DEF(iemOp_dec_eDX)
2527{
2528 /*
2529 * This is a REX prefix in 64-bit mode.
2530 */
2531 if (IEM_IS_64BIT_CODE(pVCpu))
2532 {
2533 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2534 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2535 pVCpu->iem.s.uRexIndex = 1 << 3;
2536 iemRecalEffOpSize(pVCpu);
2537
2538 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2539 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2540 }
2541
2542 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2543 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2544}
2545
2546
2547/**
2548 * @opcode 0x4b
2549 * @opflclass incdec
2550 */
2551FNIEMOP_DEF(iemOp_dec_eBX)
2552{
2553 /*
2554 * This is a REX prefix in 64-bit mode.
2555 */
2556 if (IEM_IS_64BIT_CODE(pVCpu))
2557 {
2558 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2559 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2560 pVCpu->iem.s.uRexB = 1 << 3;
2561 pVCpu->iem.s.uRexIndex = 1 << 3;
2562 iemRecalEffOpSize(pVCpu);
2563
2564 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2565 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2566 }
2567
2568 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2569 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2570}
2571
2572
2573/**
2574 * @opcode 0x4c
2575 * @opflclass incdec
2576 */
2577FNIEMOP_DEF(iemOp_dec_eSP)
2578{
2579 /*
2580 * This is a REX prefix in 64-bit mode.
2581 */
2582 if (IEM_IS_64BIT_CODE(pVCpu))
2583 {
2584 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2585 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2586 pVCpu->iem.s.uRexReg = 1 << 3;
2587 iemRecalEffOpSize(pVCpu);
2588
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2591 }
2592
2593 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2594 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2595}
2596
2597
2598/**
2599 * @opcode 0x4d
2600 * @opflclass incdec
2601 */
2602FNIEMOP_DEF(iemOp_dec_eBP)
2603{
2604 /*
2605 * This is a REX prefix in 64-bit mode.
2606 */
2607 if (IEM_IS_64BIT_CODE(pVCpu))
2608 {
2609 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2610 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2611 pVCpu->iem.s.uRexReg = 1 << 3;
2612 pVCpu->iem.s.uRexB = 1 << 3;
2613 iemRecalEffOpSize(pVCpu);
2614
2615 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2616 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2617 }
2618
2619 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2620 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2621}
2622
2623
2624/**
2625 * @opcode 0x4e
2626 * @opflclass incdec
2627 */
2628FNIEMOP_DEF(iemOp_dec_eSI)
2629{
2630 /*
2631 * This is a REX prefix in 64-bit mode.
2632 */
2633 if (IEM_IS_64BIT_CODE(pVCpu))
2634 {
2635 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2636 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2637 pVCpu->iem.s.uRexReg = 1 << 3;
2638 pVCpu->iem.s.uRexIndex = 1 << 3;
2639 iemRecalEffOpSize(pVCpu);
2640
2641 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2642 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2643 }
2644
2645 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2646 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2647}
2648
2649
2650/**
2651 * @opcode 0x4f
2652 * @opflclass incdec
2653 */
2654FNIEMOP_DEF(iemOp_dec_eDI)
2655{
2656 /*
2657 * This is a REX prefix in 64-bit mode.
2658 */
2659 if (IEM_IS_64BIT_CODE(pVCpu))
2660 {
2661 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2662 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2663 pVCpu->iem.s.uRexReg = 1 << 3;
2664 pVCpu->iem.s.uRexB = 1 << 3;
2665 pVCpu->iem.s.uRexIndex = 1 << 3;
2666 iemRecalEffOpSize(pVCpu);
2667
2668 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2669 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2670 }
2671
2672 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2673 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2674}
2675
2676
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); OR'ed with the
 *                  REX.B extension bit in 64-bit mode.
 *
 * In 64-bit mode the default operand size for pushes is 64-bit and a 0x66
 * prefix selects 16-bit (there is no 32-bit push in 64-bit mode), see the
 * enmEffOpSize update below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;     /* Apply the REX.B register extension. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2724
2725
/**
 * @opcode 0x50
 *
 * Push xAX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2734
2735
/**
 * @opcode 0x51
 *
 * Push xCX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2744
2745
/**
 * @opcode 0x52
 *
 * Push xDX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2754
2755
/**
 * @opcode 0x53
 *
 * Push xBX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2764
2765
/**
 * @opcode 0x54
 *
 * Push xSP.  The 8086 pushes the already-decremented SP value (SP - 2),
 * while the 80186 and later push the original SP value, so the 8086 gets
 * a dedicated code path below.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    IEM_MC_BEGIN(IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 quirk: the pushed value is SP - 2. */
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2785
2786
/**
 * @opcode 0x55
 *
 * Push xBP; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2795
2796
/**
 * @opcode 0x56
 *
 * Push xSI; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2805
2806
/**
 * @opcode 0x57
 *
 * Push xDI; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2815
2816
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); OR'ed with the
 *                  REX.B extension bit in 64-bit mode.
 *
 * In 64-bit mode the default operand size for pops is 64-bit and a 0x66
 * prefix selects 16-bit (there is no 32-bit pop in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;     /* Apply the REX.B register extension. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2858
2859
/**
 * @opcode 0x58
 *
 * Pop xAX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2868
2869
/**
 * @opcode 0x59
 *
 * Pop xCX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2878
2879
/**
 * @opcode 0x5a
 *
 * Pop xDX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2888
2889
/**
 * @opcode 0x5b
 *
 * Pop xBX; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2898
2899
/**
 * @opcode 0x5c
 *
 * Pop xSP; the common worker (and the IEM_MC_POP_GREG_XXX micro-ops it
 * uses) take care of the stack-pointer-destination specifics.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2908
2909
/**
 * @opcode 0x5d
 *
 * Pop xBP; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2918
2919
/**
 * @opcode 0x5e
 *
 * Pop xSI; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2928
2929
/**
 * @opcode 0x5f
 *
 * Pop xDI; operand size and REX.B handling is done by the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2938
2939
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - deferred to a C implementation.  Requires a 186+, invalid
 * in 64-bit mode.  Only xSP is listed in the modified-register mask for the
 * recompiler; the other registers are merely read.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* Only 16/32-bit op sizes possible outside 64-bit mode. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2953
2954
/**
 * @opcode 0x61
 *
 * POPA/POPAD in 16/32-bit code - deferred to a C implementation with all
 * eight GPRs in the modified-register mask.  In 64-bit code byte 0x61 is
 * the MVEX prefix (Knights Corner), which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* Only 16/32-bit op sizes possible outside 64-bit mode. */
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2992
2993
2994/**
2995 * @opcode 0x62
2996 * @opmnemonic bound
2997 * @op1 Gv_RO
2998 * @op2 Ma
2999 * @opmincpu 80186
3000 * @ophints harmless x86_invalid_64
3001 * @optest op1=0 op2=0 ->
3002 * @optest op1=1 op2=0 -> value.xcpt=5
3003 * @optest o16 / op1=0xffff op2=0x0000fffe ->
3004 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
3005 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
3006 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
3007 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
3008 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
3009 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
3010 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
3011 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
3012 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
3013 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
3014 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
3015 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
3016 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
3017 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
3018 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
3019 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
3020 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
3021 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
3022 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
3023 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
3024 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
3025 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
3026 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
3027 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
3028 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
3029 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
3030 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
3031 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
3032 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
3033 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
3034 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
3035 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
3036 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
3037 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
3038 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
3039 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
3040 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
3041 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
3042 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
3043 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
3044 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
3045 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit operands: fetch the index register and the two 16-bit
                   bounds from memory, then let the C implementation do the
                   range check (and raise #BR on failure). */
                IEM_MC_BEGIN(IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* Only reached for MOD=3 (register form): this is only valid as an
           EVEX prefix, which requires AVX-512 support on the guest CPU. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix handling: consume the two remaining payload bytes; actual
       EVEX decoding is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
3133
3134
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector.  Requires
 * a 286+ and protected mode (no real or V8086 mode).  The assembly worker
 * updates ZF via the EFLAGS reference/local.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference the GREG directly. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write and commit afterwards. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3186
3187
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev - 64-bit mode only (the caller already branched on mode).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.
 * @note The non-REX.W form is not implemented here (asserts below) —
 *       presumably it behaves as a plain 32-bit move; confirm before
 *       implementing.
 */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination.
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory (sign-extending 32 -> 64).
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED); /* Non-REX.W form, see @note above. */
}
3235
3236
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment override prefix: record it, set the effective segment, and
 * restart decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3254
3255
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment override prefix: record it, set the effective segment, and
 * restart decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3273
3274
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: record it, recalculate the effective
 * operand size, and restart decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand size prefix doesn't count
       when REPZ or REPNZ are present (idxPrefix: 0=none, 1=0x66, 2=F3, 3=F2 -
       only claim the slot if no REP prefix has been seen yet). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3299
3300
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode relative
 * to the default (16<->32 outside 64-bit mode, 64->32 in 64-bit mode), then
 * restarts decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3326
3327
/**
 * @opcode 0x68
 *
 * PUSH Iz - push a word/dword immediate.  Requires a 186+.  In 64-bit mode
 * the default operand size is 64-bit and the 32-bit immediate is
 * sign-extended to 64 bits (see IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3371
3372
/**
 * @opcode 0x69
 * @opflclass multiply
 *
 * IMUL Gv,Ev,Iz: Gv = Ev * Iz (three-operand form with full-size
 * immediate).  Requires a 186+.  SF/ZF/AF/PF are undefined after the
 * instruction; the EFLAGS-behavior-selected worker returns the updated
 * flags by value (fEFlagsIn -> fEFlagsRet).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Select the host/guest EFLAGS-behavior specific worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = trailing immediate size. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing immediate size. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing immediate size. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 2); /* Sign-extend here instead. */
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3533
3534
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate.  Requires a 186+.  The
 * immediate is sign-extended to the effective operand size (64-bit default
 * in long mode) before being pushed.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3574
3575
3576/**
3577 * @opcode 0x6b
3578 * @opflclass multiply
3579 */
3580FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3581{
3582 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3583 IEMOP_HLP_MIN_186();
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3586
3587 switch (pVCpu->iem.s.enmEffOpSize)
3588 {
3589 case IEMMODE_16BIT:
3590 {
3591 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3592 if (IEM_IS_MODRM_REG_MODE(bRm))
3593 {
3594 /* register operand */
3595 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3596 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3598
3599 IEM_MC_LOCAL(uint16_t, u16Tmp);
3600 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3601
3602 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3603 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3604 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 2);
3605 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3606 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3607 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3608
3609 IEM_MC_ADVANCE_RIP_AND_FINISH();
3610 IEM_MC_END();
3611 }
3612 else
3613 {
3614 /* memory operand */
3615 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3616
3617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3619
3620 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3622
3623 IEM_MC_LOCAL(uint16_t, u16Tmp);
3624 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3625
3626 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3627 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3628 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
3629 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3630 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3631 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3632
3633 IEM_MC_ADVANCE_RIP_AND_FINISH();
3634 IEM_MC_END();
3635 }
3636 break;
3637 }
3638
3639 case IEMMODE_32BIT:
3640 {
3641 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3642 if (IEM_IS_MODRM_REG_MODE(bRm))
3643 {
3644 /* register operand */
3645 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3646 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3648 IEM_MC_LOCAL(uint32_t, u32Tmp);
3649 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3650
3651 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3652 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3653 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 2);
3654 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3655 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3656 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3657
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 IEM_MC_END();
3660 }
3661 else
3662 {
3663 /* memory operand */
3664 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3667
3668 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3670
3671 IEM_MC_LOCAL(uint32_t, u32Tmp);
3672 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3673
3674 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3675 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3676 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
3677 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3678 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3679 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3680
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 IEM_MC_END();
3683 }
3684 break;
3685 }
3686
3687 case IEMMODE_64BIT:
3688 {
3689 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3690 if (IEM_IS_MODRM_REG_MODE(bRm))
3691 {
3692 /* register operand */
3693 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3694 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696 IEM_MC_LOCAL(uint64_t, u64Tmp);
3697 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3698
3699 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3700 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3701 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3702 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3703 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3704 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3705
3706 IEM_MC_ADVANCE_RIP_AND_FINISH();
3707 IEM_MC_END();
3708 }
3709 else
3710 {
3711 /* memory operand */
3712 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3715
3716 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3718
3719 IEM_MC_LOCAL(uint64_t, u64Tmp);
3720 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3721
3722 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3723 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3724 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3725 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3726 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3727 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3728
3729 IEM_MC_ADVANCE_RIP_AND_FINISH();
3730 IEM_MC_END();
3731 }
3732 break;
3733 }
3734
3735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3736 }
3737}
3738
3739
/**
 * @opcode 0x6c
 * @opfltest iopl,df
 *
 * INS/INSB: input byte(s) from port DX into ES:[e/r]DI.  Both the plain and
 * the REP-prefixed forms are too complex for MC blocks and are deferred to C
 * implementations selected by the effective address size.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* F2 is treated like F3 (REP) here */
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The GPR bitmask names the guest registers the cImpl updates
               (xDI, and xCX for the REP count) - NOTE(review): presumably
               consumed by the native recompiler; confirm against the
               IEM_MC_DEFER_TO_CIMPL_1_RET definition. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Non-REP form only touches xDI. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3792
3793
/**
 * @opcode 0x6d
 * @opfltest iopl,df
 *
 * INS/INSW/INSD: input word/dword(s) from port DX into ES:[e/r]DI.  Dispatch
 * is two-level: effective operand size (16 vs 32; 64-bit op size shares the
 * 32-bit path) then effective address size, each combination deferring to a
 * dedicated C implementation.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* F2 is treated like F3 (REP) here */
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port accesses max out at 32 bits, so 64-bit op size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* see REP variant above: op32 workers cover 64-bit op size too */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3899
3900
/**
 * @opcode 0x6e
 * @opfltest iopl,df
 *
 * OUTS/OUTSB: output byte(s) from [seg]:[e/r]SI to port DX.  Unlike INS, the
 * source segment is overridable, so the effective segment index is forwarded
 * as an extra argument to the deferred C implementation.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* F2 is treated like F3 (REP) here */
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* GPR mask: xSI (and xCX for the REP count) are updated by the cImpl. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Non-REP form only touches xSI. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3953
3954
/**
 * @opcode 0x6f
 * @opfltest iopl,df
 *
 * OUTS/OUTSW/OUTSD: output word/dword(s) from [seg]:[e/r]SI to port DX.
 * Two-level dispatch like INSW/INSD: effective operand size (64-bit op size
 * shares the 32-bit path) then effective address size; the effective segment
 * index is forwarded to the deferred C implementation.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* F2 is treated like F3 (REP) here */
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port accesses max out at 32 bits, so 64-bit op size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* see REP variant above: op32 workers cover 64-bit op size too */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4060
4061
/**
 * @opcode 0x70
 * @opfltest of
 *
 * JO rel8 - short jump if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4081
4082
/**
 * @opcode 0x71
 * @opfltest of
 *
 * JNO rel8 - short jump if the overflow flag (OF) is clear.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4102
/**
 * @opcode 0x72
 * @opfltest cf
 *
 * JC/JB/JNAE rel8 - short jump if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4122
4123
/**
 * @opcode 0x73
 * @opfltest cf
 *
 * JNC/JNB/JAE rel8 - short jump if the carry flag (CF) is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4143
4144
/**
 * @opcode 0x74
 * @opfltest zf
 *
 * JE/JZ rel8 - short jump if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4164
4165
/**
 * @opcode 0x75
 * @opfltest zf
 *
 * JNE/JNZ rel8 - short jump if the zero flag (ZF) is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4185
4186
/**
 * @opcode 0x76
 * @opfltest cf,zf
 *
 * JBE/JNA rel8 - short jump if CF or ZF is set (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4206
4207
/**
 * @opcode 0x77
 * @opfltest cf,zf
 *
 * JA/JNBE rel8 - short jump if both CF and ZF are clear (unsigned above).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4227
4228
/**
 * @opcode 0x78
 * @opfltest sf
 *
 * JS rel8 - short jump if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4248
4249
/**
 * @opcode 0x79
 * @opfltest sf
 *
 * JNS rel8 - short jump if the sign flag (SF) is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4269
4270
/**
 * @opcode 0x7a
 * @opfltest pf
 *
 * JP/JPE rel8 - short jump if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4290
4291
/**
 * @opcode 0x7b
 * @opfltest pf
 *
 * JNP/JPO rel8 - short jump if the parity flag (PF) is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4311
4312
/**
 * @opcode 0x7c
 * @opfltest sf,of
 *
 * JL/JNGE rel8 - short jump if SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4332
4333
/**
 * @opcode 0x7d
 * @opfltest sf,of
 *
 * JNL/JGE rel8 - short jump if SF == OF (signed greater-or-equal).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4353
4354
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 *
 * JLE/JNG rel8 - short jump if ZF is set or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4374
4375
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 *
 * JG/JNLE rel8 - short jump if ZF is clear and SF == OF (signed greater).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { /* condition inverted: branch is taken in the else arm */
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4395
4396
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Read-write destination variant (add/or/adc/sbb/and/sub/xor).  Three paths:
 *   - register target: native-emit fast path on the architectures in
 *     @a a_fRegNativeArchs, otherwise the iemAImpl_<ins>_u8 assembly worker
 *     (takes EFLAGS by value, returns the updated EFLAGS);
 *   - memory target without LOCK (or with LOCK disregarded): plain RW
 *     mapping of the destination byte;
 *   - memory target with LOCK: atomic mapping plus the _u8_locked worker.
 *
 * @param   a_InsNm             Instruction name fragment, e.g. add.
 * @param   a_fRegNativeArchs   Arch mask for the native register path.
 * @param   a_fMemNativeArchs   Arch mask for the native memory path
 *                              (currently unreferenced by this body).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL_EFLAGS( uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4471
/**
 * Body for group 1 instruction (binary) w/ byte imm operand and a read-only
 * destination (CMP), dispatched via iemOp_Grp1_Eb_Ib_80.
 *
 * Only EFLAGS are updated; the destination is never written, so the memory
 * path maps read-only and the native path can operate on a fetched copy.
 * A LOCK prefix on the memory form raises an invalid-opcode style fault via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 *
 * @param   a_InsNm         Instruction name fragment, e.g. cmp.
 * @param   a_fNativeArchs  Arch mask for the native-emit fast paths.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4532
4533
4534
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Eb,Ib - byte add with immediate; register, memory and LOCKed memory
 * forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4545
4546
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Eb,Ib - byte bitwise OR with immediate; register, memory and LOCKed
 * memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4557
4558
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib - byte add-with-carry with immediate; register, memory and
 * LOCKed memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4569
4570
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib - byte subtract-with-borrow with immediate; register, memory and
 * LOCKed memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4581
4582
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Eb,Ib - byte bitwise AND with immediate; register, memory and LOCKed
 * memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4593
4594
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Eb,Ib - byte subtract with immediate; register, memory and LOCKed
 * memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4605
4606
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Eb,Ib - byte bitwise XOR with immediate; register, memory and LOCKed
 * memory forms are provided by IEMOP_BODY_BINARY_Eb_Ib_RW.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4617
4618
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Eb,Ib - byte compare with immediate.  Uses the read-only body variant
 * since only EFLAGS are updated; a LOCK prefix raises a fault there.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
4629
4630
/**
 * @opcode 0x80
 *
 * Group 1 Eb,Ib dispatcher: the ModR/M reg field (bits 5:3) selects the
 * actual operation (/0 add ... /7 cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4650
4651
4652/**
4653 * Body for a group 1 binary operator.
4654 */
4655#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4656 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4657 { \
4658 /* register target */ \
4659 switch (pVCpu->iem.s.enmEffOpSize) \
4660 { \
4661 case IEMMODE_16BIT: \
4662 { \
4663 IEM_MC_BEGIN(0, 0); \
4664 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4666 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4667 IEM_MC_LOCAL(uint16_t, u16Dst); \
4668 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4669 IEM_MC_LOCAL(uint32_t, uEFlags); \
4670 IEM_MC_FETCH_EFLAGS(uEFlags); \
4671 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4672 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
4673 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4674 } IEM_MC_NATIVE_ELSE() { \
4675 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4676 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4677 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4678 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 2); \
4679 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
4680 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4681 } IEM_MC_NATIVE_ENDIF(); \
4682 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4683 IEM_MC_END(); \
4684 break; \
4685 } \
4686 \
4687 case IEMMODE_32BIT: \
4688 { \
4689 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4690 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4692 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4693 IEM_MC_LOCAL(uint32_t, u32Dst); \
4694 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4695 IEM_MC_LOCAL(uint32_t, uEFlags); \
4696 IEM_MC_FETCH_EFLAGS(uEFlags); \
4697 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4698 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
4699 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4700 } IEM_MC_NATIVE_ELSE() { \
4701 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4704 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 2); \
4705 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
4706 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4707 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4708 } IEM_MC_NATIVE_ENDIF(); \
4709 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4710 IEM_MC_END(); \
4711 break; \
4712 } \
4713 \
4714 case IEMMODE_64BIT: \
4715 { \
4716 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4717 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4719 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4720 IEM_MC_LOCAL(uint64_t, u64Dst); \
4721 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4722 IEM_MC_LOCAL(uint32_t, uEFlags); \
4723 IEM_MC_FETCH_EFLAGS(uEFlags); \
4724 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4725 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
4726 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4727 } IEM_MC_NATIVE_ELSE() { \
4728 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4729 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4730 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4731 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 2); \
4732 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
4733 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4734 } IEM_MC_NATIVE_ENDIF(); \
4735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4736 IEM_MC_END(); \
4737 break; \
4738 } \
4739 \
4740 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4741 } \
4742 } \
4743 else \
4744 { \
4745 /* memory target */ \
4746 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4747 { \
4748 switch (pVCpu->iem.s.enmEffOpSize) \
4749 { \
4750 case IEMMODE_16BIT: \
4751 { \
4752 IEM_MC_BEGIN(0, 0); \
4753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4755 \
4756 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4757 IEMOP_HLP_DONE_DECODING(); \
4758 \
4759 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4760 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4761 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4762 \
4763 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
4764 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4765 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
4766 \
4767 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4768 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4769 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4770 IEM_MC_END(); \
4771 break; \
4772 } \
4773 \
4774 case IEMMODE_32BIT: \
4775 { \
4776 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4779 \
4780 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4781 IEMOP_HLP_DONE_DECODING(); \
4782 \
4783 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4784 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4785 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4786 \
4787 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
4788 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4789 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
4790 \
4791 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4792 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4793 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4794 IEM_MC_END(); \
4795 break; \
4796 } \
4797 \
4798 case IEMMODE_64BIT: \
4799 { \
4800 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4801 \
4802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4804 \
4805 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4806 IEMOP_HLP_DONE_DECODING(); \
4807 \
4808 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4809 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4810 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4811 \
4812 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
4813 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4814 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
4815 \
4816 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4817 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4818 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4819 IEM_MC_END(); \
4820 break; \
4821 } \
4822 \
4823 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4824 } \
4825 } \
4826 else \
4827 { \
4828 switch (pVCpu->iem.s.enmEffOpSize) \
4829 { \
4830 case IEMMODE_16BIT: \
4831 { \
4832 IEM_MC_BEGIN(0, 0); \
4833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4835 \
4836 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4837 IEMOP_HLP_DONE_DECODING(); \
4838 \
4839 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4840 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4841 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4842 \
4843 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
4844 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4845 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
4846 \
4847 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4848 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4849 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4850 IEM_MC_END(); \
4851 break; \
4852 } \
4853 \
4854 case IEMMODE_32BIT: \
4855 { \
4856 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4859 \
4860 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4861 IEMOP_HLP_DONE_DECODING(); \
4862 \
4863 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4864 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4865 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4866 \
4867 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
4868 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4869 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
4870 \
4871 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4872 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4873 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4874 IEM_MC_END(); \
4875 break; \
4876 } \
4877 \
4878 case IEMMODE_64BIT: \
4879 { \
4880 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4883 \
4884 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4885 IEMOP_HLP_DONE_DECODING(); \
4886 \
4887 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4888 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4889 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4890 \
4891 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
4892 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4893 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
4894 \
4895 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4896 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4897 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4898 IEM_MC_END(); \
4899 break; \
4900 } \
4901 \
4902 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4903 } \
4904 } \
4905 } \
4906 (void)0
4907
/* read-only version */
/**
 * Read-only counterpart of IEMOP_BODY_BINARY_Ev_Iz_RW for instructions that
 * only read their destination operand (cmp): the destination is never written,
 * memory operands are fetched/mapped read-only, and a lock prefix on the
 * memory form is rejected via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 *
 * @param   a_InsNm         The instruction name, lower case.
 * @param   a_fNativeArchs  RT_ARCH_VAL_XXX mask of hosts with native
 *                          recompiler support (used for both register and
 *                          memory forms).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(0, 0); \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the ModR/M bytes */ \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                        IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M bytes */ \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                        IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M bytes */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                        IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The lock prefix cannot be used with a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5094
5095
5096/**
5097 * @opmaps grp1_81
5098 * @opcode /0
5099 * @opflclass arithmetic
5100 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5106
5107
5108/**
5109 * @opmaps grp1_81
5110 * @opcode /1
5111 * @opflclass logical
5112 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5118
5119
5120/**
5121 * @opmaps grp1_81
5122 * @opcode /2
5123 * @opflclass arithmetic_carry
5124 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5130
5131
5132/**
5133 * @opmaps grp1_81
5134 * @opcode /3
5135 * @opflclass arithmetic_carry
5136 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5142
5143
5144/**
5145 * @opmaps grp1_81
5146 * @opcode /4
5147 * @opflclass logical
5148 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5154
5155
5156/**
5157 * @opmaps grp1_81
5158 * @opcode /5
5159 * @opflclass arithmetic
5160 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5166
5167
5168/**
5169 * @opmaps grp1_81
5170 * @opcode /6
5171 * @opflclass logical
5172 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Read-modify-write word/dword/qword destination; register-form native
       emitter enabled for AMD64 & ARM64 hosts, memory-form mask is 0. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5178
5179
5180/**
5181 * @opmaps grp1_81
5182 * @opcode /7
5183 * @opflclass arithmetic
5184 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* cmp only reads the destination, so the read-only body is used (it also
       rejects the lock prefix); no memory-arch parameter on the RO variant. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5190
5191
5192/**
5193 * @opcode 0x81
5194 */
5195FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
5196{
5197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5198 switch (IEM_GET_MODRM_REG_8(bRm))
5199 {
5200 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
5201 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
5202 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
5203 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
5204 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
5205 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
5206 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
5207 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
5208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5209 }
5210}
5211
5212
5213/**
5214 * @opcode 0x82
5215 * @opmnemonic grp1_82
5216 * @opgroup og_groups
5217 */
5218FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
5219{
5220 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
5221 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
5222}
5223
5224
5225/**
5226 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
5227 * iemOp_Grp1_Ev_Ib.
5228 */
5229#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
5230 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5231 { \
5232 /* \
5233 * Register target \
5234 */ \
5235 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
5236 switch (pVCpu->iem.s.enmEffOpSize) \
5237 { \
5238 case IEMMODE_16BIT: \
5239 IEM_MC_BEGIN(0, 0); \
5240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5241 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5242 IEM_MC_LOCAL(uint16_t, u16Dst); \
5243 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5244 IEM_MC_LOCAL(uint32_t, uEFlags); \
5245 IEM_MC_FETCH_EFLAGS(uEFlags); \
5246 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5247 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
5248 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5249 } IEM_MC_NATIVE_ELSE() { \
5250 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5251 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5252 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5253 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5254 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5255 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5256 } IEM_MC_NATIVE_ENDIF(); \
5257 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5258 IEM_MC_END(); \
5259 break; \
5260 \
5261 case IEMMODE_32BIT: \
5262 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5264 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5265 IEM_MC_LOCAL(uint32_t, u32Dst); \
5266 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5267 IEM_MC_LOCAL(uint32_t, uEFlags); \
5268 IEM_MC_FETCH_EFLAGS(uEFlags); \
5269 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5270 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
5271 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5272 } IEM_MC_NATIVE_ELSE() { \
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5274 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5275 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5276 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5277 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5278 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
5279 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5280 } IEM_MC_NATIVE_ENDIF(); \
5281 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5282 IEM_MC_END(); \
5283 break; \
5284 \
5285 case IEMMODE_64BIT: \
5286 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5288 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5289 IEM_MC_LOCAL(uint64_t, u64Dst); \
5290 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5291 IEM_MC_LOCAL(uint32_t, uEFlags); \
5292 IEM_MC_FETCH_EFLAGS(uEFlags); \
5293 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5294 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
5295 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5296 } IEM_MC_NATIVE_ELSE() { \
5297 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5298 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5299 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5300 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5301 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5302 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5303 } IEM_MC_NATIVE_ENDIF(); \
5304 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5305 IEM_MC_END(); \
5306 break; \
5307 \
5308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5309 } \
5310 } \
5311 else \
5312 { \
5313 /* \
5314 * Memory target. \
5315 */ \
5316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5317 { \
5318 switch (pVCpu->iem.s.enmEffOpSize) \
5319 { \
5320 case IEMMODE_16BIT: \
5321 IEM_MC_BEGIN(0, 0); \
5322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5324 \
5325 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5326 IEMOP_HLP_DONE_DECODING(); \
5327 \
5328 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5329 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5330 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5331 \
5332 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5333 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5334 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5335 \
5336 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5337 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5339 IEM_MC_END(); \
5340 break; \
5341 \
5342 case IEMMODE_32BIT: \
5343 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5346 \
5347 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5348 IEMOP_HLP_DONE_DECODING(); \
5349 \
5350 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5351 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5352 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5353 \
5354 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5355 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5356 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5357 \
5358 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5359 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5360 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5361 IEM_MC_END(); \
5362 break; \
5363 \
5364 case IEMMODE_64BIT: \
5365 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5368 \
5369 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5370 IEMOP_HLP_DONE_DECODING(); \
5371 \
5372 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5373 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5374 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5375 \
5376 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5377 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5378 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5379 \
5380 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5381 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5382 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5383 IEM_MC_END(); \
5384 break; \
5385 \
5386 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5387 } \
5388 } \
5389 else \
5390 { \
5391 switch (pVCpu->iem.s.enmEffOpSize) \
5392 { \
5393 case IEMMODE_16BIT: \
5394 IEM_MC_BEGIN(0, 0); \
5395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5397 \
5398 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5399 IEMOP_HLP_DONE_DECODING(); \
5400 \
5401 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5402 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5403 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5404 \
5405 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5406 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5407 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
5408 \
5409 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5410 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5411 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5412 IEM_MC_END(); \
5413 break; \
5414 \
5415 case IEMMODE_32BIT: \
5416 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5419 \
5420 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5421 IEMOP_HLP_DONE_DECODING(); \
5422 \
5423 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5424 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5425 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5426 \
5427 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5428 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5429 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
5430 \
5431 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5432 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5433 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5434 IEM_MC_END(); \
5435 break; \
5436 \
5437 case IEMMODE_64BIT: \
5438 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5441 \
5442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5443 IEMOP_HLP_DONE_DECODING(); \
5444 \
5445 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5446 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5447 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5448 \
5449 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5450 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5451 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
5452 \
5453 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5454 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5455 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5456 IEM_MC_END(); \
5457 break; \
5458 \
5459 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5460 } \
5461 } \
5462 } \
5463 (void)0
5464
/* read-only variant */
/**
 * Body macro for the read-only group-1 "Ev,Ib" instruction form (i.e. CMP),
 * where the destination operand is only read and just EFLAGS are produced.
 *
 * Handles both register and memory destinations for all three effective
 * operand sizes; the byte immediate is sign-extended to the operand size at
 * decode time.  For the architectures given in @a a_fNativeArchs a native
 * recompiler fast path is emitted, otherwise the C/assembly helper
 * iemAImpl_<a_InsNm>_uNN is called (EFLAGS passed in by value, updated value
 * returned).  Since nothing is written to the destination, a LOCK prefix on
 * the memory form is invalid and raises \#UD.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS( uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *,pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS( uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *,pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS( uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *,pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target.  The effective address is calculated before the \
         * immediate is fetched (decode order matters here). \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS( uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS( uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS( uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The destination is not written, so the LOCK prefix is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5638
5639/**
5640 * @opmaps grp1_83
5641 * @opcode /0
5642 * @opflclass arithmetic
5643 */
5644FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5645{
5646 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5647 IEMOP_BODY_BINARY_Ev_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5648}
5649
5650
5651/**
5652 * @opmaps grp1_83
5653 * @opcode /1
5654 * @opflclass logical
5655 */
5656FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5657{
5658 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5659 IEMOP_BODY_BINARY_Ev_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5660}
5661
5662
5663/**
5664 * @opmaps grp1_83
5665 * @opcode /2
5666 * @opflclass arithmetic_carry
5667 */
5668FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5669{
5670 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5671 IEMOP_BODY_BINARY_Ev_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5672}
5673
5674
5675/**
5676 * @opmaps grp1_83
5677 * @opcode /3
5678 * @opflclass arithmetic_carry
5679 */
5680FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5681{
5682 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5683 IEMOP_BODY_BINARY_Ev_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5684}
5685
5686
5687/**
5688 * @opmaps grp1_83
5689 * @opcode /4
5690 * @opflclass logical
5691 */
5692FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5693{
5694 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5695 IEMOP_BODY_BINARY_Ev_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5696}
5697
5698
5699/**
5700 * @opmaps grp1_83
5701 * @opcode /5
5702 * @opflclass arithmetic
5703 */
5704FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5705{
5706 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5707 IEMOP_BODY_BINARY_Ev_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5708}
5709
5710
5711/**
5712 * @opmaps grp1_83
5713 * @opcode /6
5714 * @opflclass logical
5715 */
5716FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5717{
5718 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5719 IEMOP_BODY_BINARY_Ev_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5720}
5721
5722
5723/**
5724 * @opmaps grp1_83
5725 * @opcode /7
5726 * @opflclass arithmetic
5727 */
5728FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5729{
5730 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5731 IEMOP_BODY_BINARY_Ev_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5732}
5733
5734
5735/**
5736 * @opcode 0x83
5737 */
5738FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5739{
5740 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5741 to the 386 even if absent in the intel reference manuals and some
5742 3rd party opcode listings. */
5743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5744 switch (IEM_GET_MODRM_REG_8(bRm))
5745 {
5746 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5747 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5748 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5749 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5750 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5751 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5752 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5753 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5755 }
5756}
5757
5758
5759/**
5760 * @opcode 0x84
5761 * @opflclass logical
5762 */
5763FNIEMOP_DEF(iemOp_test_Eb_Gb)
5764{
5765 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5766 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5767
5768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5769
5770 /*
5771 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5772 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5773 */
5774 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5775 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5776 {
5777 IEM_MC_BEGIN(0, 0);
5778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5779 IEM_MC_ARG(uint8_t, u8Src, 2);
5780 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5781 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5782 IEM_MC_LOCAL_EFLAGS(uEFlags);
5783 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
5784 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5785 } IEM_MC_NATIVE_ELSE() {
5786 IEM_MC_ARG(uint8_t *, pu8Dst, 1);
5787 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5788 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5789 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u8, fEFlagsIn, pu8Dst, u8Src);
5790 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5791 } IEM_MC_NATIVE_ENDIF();
5792 IEM_MC_ADVANCE_RIP_AND_FINISH();
5793 IEM_MC_END();
5794 }
5795
5796 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5797}
5798
5799
5800/**
5801 * @opcode 0x85
5802 * @opflclass logical
5803 */
5804FNIEMOP_DEF(iemOp_test_Ev_Gv)
5805{
5806 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5807 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5808
5809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5810
5811 /*
5812 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5813 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5814 */
5815 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5816 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5817 {
5818 switch (pVCpu->iem.s.enmEffOpSize)
5819 {
5820 case IEMMODE_16BIT:
5821 IEM_MC_BEGIN(0, 0);
5822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5823 IEM_MC_ARG(uint16_t, u16Src, 2);
5824 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5825 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5826 IEM_MC_LOCAL_EFLAGS(uEFlags);
5827 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
5828 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5829 } IEM_MC_NATIVE_ELSE() {
5830 IEM_MC_ARG(uint16_t *, pu16Dst, 1);
5831 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5832 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5833 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u16, fEFlagsIn, pu16Dst, u16Src);
5834 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5835 } IEM_MC_NATIVE_ENDIF();
5836 IEM_MC_ADVANCE_RIP_AND_FINISH();
5837 IEM_MC_END();
5838 break;
5839
5840 case IEMMODE_32BIT:
5841 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5843 IEM_MC_ARG(uint32_t, u32Src, 2);
5844 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5845 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5846 IEM_MC_LOCAL_EFLAGS(uEFlags);
5847 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
5848 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5849 } IEM_MC_NATIVE_ELSE() {
5850 IEM_MC_ARG(uint32_t *, pu32Dst, 1);
5851 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5852 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5853 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u32, fEFlagsIn, pu32Dst, u32Src);
5854 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5855 } IEM_MC_NATIVE_ENDIF();
5856 IEM_MC_ADVANCE_RIP_AND_FINISH();
5857 IEM_MC_END();
5858 break;
5859
5860 case IEMMODE_64BIT:
5861 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863 IEM_MC_ARG(uint64_t, u64Src, 2);
5864 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5865 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5866 IEM_MC_LOCAL_EFLAGS(uEFlags);
5867 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
5868 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5869 } IEM_MC_NATIVE_ELSE() {
5870 IEM_MC_ARG(uint64_t *, pu64Dst, 1);
5871 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5872 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5873 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u64, fEFlagsIn, pu64Dst, u64Src);
5874 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5875 } IEM_MC_NATIVE_ENDIF();
5876 IEM_MC_ADVANCE_RIP_AND_FINISH();
5877 IEM_MC_END();
5878 break;
5879
5880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5881 }
5882 }
5883
5884 IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5885}
5886
5887
5888/**
5889 * @opcode 0x86
5890 */
5891FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5892{
5893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5894 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5895
5896 /*
5897 * If rm is denoting a register, no more instruction bytes.
5898 */
5899 if (IEM_IS_MODRM_REG_MODE(bRm))
5900 {
5901 IEM_MC_BEGIN(0, 0);
5902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5903 IEM_MC_LOCAL(uint8_t, uTmp1);
5904 IEM_MC_LOCAL(uint8_t, uTmp2);
5905
5906 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5907 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5908 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5909 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5910
5911 IEM_MC_ADVANCE_RIP_AND_FINISH();
5912 IEM_MC_END();
5913 }
5914 else
5915 {
5916 /*
5917 * We're accessing memory.
5918 */
5919#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5920 IEM_MC_BEGIN(0, 0); \
5921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5922 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5923 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5924 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5925 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5926 \
5927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5928 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5929 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5930 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5931 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5932 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5933 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5934 \
5935 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5936 IEM_MC_END()
5937
5938 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5939 {
5940 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5941 }
5942 else
5943 {
5944 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5945 }
5946 }
5947}
5948
5949
5950/**
5951 * @opcode 0x87
5952 */
5953FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5954{
5955 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5957
5958 /*
5959 * If rm is denoting a register, no more instruction bytes.
5960 */
5961 if (IEM_IS_MODRM_REG_MODE(bRm))
5962 {
5963 switch (pVCpu->iem.s.enmEffOpSize)
5964 {
5965 case IEMMODE_16BIT:
5966 IEM_MC_BEGIN(0, 0);
5967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5968 IEM_MC_LOCAL(uint16_t, uTmp1);
5969 IEM_MC_LOCAL(uint16_t, uTmp2);
5970
5971 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5972 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5973 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5974 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5975
5976 IEM_MC_ADVANCE_RIP_AND_FINISH();
5977 IEM_MC_END();
5978 break;
5979
5980 case IEMMODE_32BIT:
5981 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5983 IEM_MC_LOCAL(uint32_t, uTmp1);
5984 IEM_MC_LOCAL(uint32_t, uTmp2);
5985
5986 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5987 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5988 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5989 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5990
5991 IEM_MC_ADVANCE_RIP_AND_FINISH();
5992 IEM_MC_END();
5993 break;
5994
5995 case IEMMODE_64BIT:
5996 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5998 IEM_MC_LOCAL(uint64_t, uTmp1);
5999 IEM_MC_LOCAL(uint64_t, uTmp2);
6000
6001 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
6002 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
6003 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
6004 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
6005
6006 IEM_MC_ADVANCE_RIP_AND_FINISH();
6007 IEM_MC_END();
6008 break;
6009
6010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6011 }
6012 }
6013 else
6014 {
6015 /*
6016 * We're accessing memory.
6017 */
6018#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
6019 do { \
6020 switch (pVCpu->iem.s.enmEffOpSize) \
6021 { \
6022 case IEMMODE_16BIT: \
6023 IEM_MC_BEGIN(0, 0); \
6024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6025 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6026 IEM_MC_LOCAL(uint16_t, uTmpReg); \
6027 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
6028 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
6029 \
6030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6031 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
6032 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6033 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6034 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
6035 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6036 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6037 \
6038 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6039 IEM_MC_END(); \
6040 break; \
6041 \
6042 case IEMMODE_32BIT: \
6043 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
6044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6045 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6046 IEM_MC_LOCAL(uint32_t, uTmpReg); \
6047 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
6048 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
6049 \
6050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6051 IEMOP_HLP_DONE_DECODING(); \
6052 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6053 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6054 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
6055 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6056 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6057 \
6058 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6059 IEM_MC_END(); \
6060 break; \
6061 \
6062 case IEMMODE_64BIT: \
6063 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
6064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6065 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6066 IEM_MC_LOCAL(uint64_t, uTmpReg); \
6067 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
6068 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
6069 \
6070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6071 IEMOP_HLP_DONE_DECODING(); \
6072 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6073 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6074 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
6075 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6076 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6077 \
6078 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6079 IEM_MC_END(); \
6080 break; \
6081 \
6082 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6083 } \
6084 } while (0)
6085 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
6086 {
6087 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
6088 }
6089 else
6090 {
6091 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
6092 }
6093 }
6094}
6095
6096
6097/**
6098 * @opcode 0x88
6099 */
6100FNIEMOP_DEF(iemOp_mov_Eb_Gb)
6101{
6102 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
6103
6104 uint8_t bRm;
6105 IEM_OPCODE_GET_NEXT_U8(&bRm);
6106
6107 /*
6108 * If rm is denoting a register, no more instruction bytes.
6109 */
6110 if (IEM_IS_MODRM_REG_MODE(bRm))
6111 {
6112 IEM_MC_BEGIN(0, 0);
6113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6114 IEM_MC_LOCAL(uint8_t, u8Value);
6115 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6116 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
6117 IEM_MC_ADVANCE_RIP_AND_FINISH();
6118 IEM_MC_END();
6119 }
6120 else
6121 {
6122 /*
6123 * We're writing a register to memory.
6124 */
6125 IEM_MC_BEGIN(0, 0);
6126 IEM_MC_LOCAL(uint8_t, u8Value);
6127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6130 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6131 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
6132 IEM_MC_ADVANCE_RIP_AND_FINISH();
6133 IEM_MC_END();
6134 }
6135}
6136
6137
6138/**
6139 * @opcode 0x89
6140 */
6141FNIEMOP_DEF(iemOp_mov_Ev_Gv)
6142{
6143 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
6144
6145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6146
6147 /*
6148 * If rm is denoting a register, no more instruction bytes.
6149 */
6150 if (IEM_IS_MODRM_REG_MODE(bRm))
6151 {
6152 switch (pVCpu->iem.s.enmEffOpSize)
6153 {
6154 case IEMMODE_16BIT:
6155 IEM_MC_BEGIN(0, 0);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157 IEM_MC_LOCAL(uint16_t, u16Value);
6158 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6159 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6160 IEM_MC_ADVANCE_RIP_AND_FINISH();
6161 IEM_MC_END();
6162 break;
6163
6164 case IEMMODE_32BIT:
6165 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6167 IEM_MC_LOCAL(uint32_t, u32Value);
6168 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6169 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6170 IEM_MC_ADVANCE_RIP_AND_FINISH();
6171 IEM_MC_END();
6172 break;
6173
6174 case IEMMODE_64BIT:
6175 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6177 IEM_MC_LOCAL(uint64_t, u64Value);
6178 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6179 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6180 IEM_MC_ADVANCE_RIP_AND_FINISH();
6181 IEM_MC_END();
6182 break;
6183
6184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6185 }
6186 }
6187 else
6188 {
6189 /*
6190 * We're writing a register to memory.
6191 */
6192 switch (pVCpu->iem.s.enmEffOpSize)
6193 {
6194 case IEMMODE_16BIT:
6195 IEM_MC_BEGIN(0, 0);
6196 IEM_MC_LOCAL(uint16_t, u16Value);
6197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6200 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6201 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6202 IEM_MC_ADVANCE_RIP_AND_FINISH();
6203 IEM_MC_END();
6204 break;
6205
6206 case IEMMODE_32BIT:
6207 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6208 IEM_MC_LOCAL(uint32_t, u32Value);
6209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6213 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6214 IEM_MC_ADVANCE_RIP_AND_FINISH();
6215 IEM_MC_END();
6216 break;
6217
6218 case IEMMODE_64BIT:
6219 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6220 IEM_MC_LOCAL(uint64_t, u64Value);
6221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6224 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6225 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6226 IEM_MC_ADVANCE_RIP_AND_FINISH();
6227 IEM_MC_END();
6228 break;
6229
6230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6231 }
6232 }
6233}
6234
6235
6236/**
6237 * @opcode 0x8a
6238 */
6239FNIEMOP_DEF(iemOp_mov_Gb_Eb)
6240{
6241 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
6242
6243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6244
6245 /*
6246 * If rm is denoting a register, no more instruction bytes.
6247 */
6248 if (IEM_IS_MODRM_REG_MODE(bRm))
6249 {
6250 IEM_MC_BEGIN(0, 0);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6252 IEM_MC_LOCAL(uint8_t, u8Value);
6253 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6254 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6255 IEM_MC_ADVANCE_RIP_AND_FINISH();
6256 IEM_MC_END();
6257 }
6258 else
6259 {
6260 /*
6261 * We're loading a register from memory.
6262 */
6263 IEM_MC_BEGIN(0, 0);
6264 IEM_MC_LOCAL(uint8_t, u8Value);
6265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6268 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6269 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6270 IEM_MC_ADVANCE_RIP_AND_FINISH();
6271 IEM_MC_END();
6272 }
6273}
6274
6275
6276/**
6277 * @opcode 0x8b
6278 */
6279FNIEMOP_DEF(iemOp_mov_Gv_Ev)
6280{
6281 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
6282
6283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6284
6285 /*
6286 * If rm is denoting a register, no more instruction bytes.
6287 */
6288 if (IEM_IS_MODRM_REG_MODE(bRm))
6289 {
6290 switch (pVCpu->iem.s.enmEffOpSize)
6291 {
6292 case IEMMODE_16BIT:
6293 IEM_MC_BEGIN(0, 0);
6294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6295 IEM_MC_LOCAL(uint16_t, u16Value);
6296 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6297 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6298 IEM_MC_ADVANCE_RIP_AND_FINISH();
6299 IEM_MC_END();
6300 break;
6301
6302 case IEMMODE_32BIT:
6303 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 IEM_MC_LOCAL(uint32_t, u32Value);
6306 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6307 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6308 IEM_MC_ADVANCE_RIP_AND_FINISH();
6309 IEM_MC_END();
6310 break;
6311
6312 case IEMMODE_64BIT:
6313 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6315 IEM_MC_LOCAL(uint64_t, u64Value);
6316 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6317 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6318 IEM_MC_ADVANCE_RIP_AND_FINISH();
6319 IEM_MC_END();
6320 break;
6321
6322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6323 }
6324 }
6325 else
6326 {
6327 /*
6328 * We're loading a register from memory.
6329 */
6330 switch (pVCpu->iem.s.enmEffOpSize)
6331 {
6332 case IEMMODE_16BIT:
6333 IEM_MC_BEGIN(0, 0);
6334 IEM_MC_LOCAL(uint16_t, u16Value);
6335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6338 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6339 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6340 IEM_MC_ADVANCE_RIP_AND_FINISH();
6341 IEM_MC_END();
6342 break;
6343
6344 case IEMMODE_32BIT:
6345 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6346 IEM_MC_LOCAL(uint32_t, u32Value);
6347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6351 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6352 IEM_MC_ADVANCE_RIP_AND_FINISH();
6353 IEM_MC_END();
6354 break;
6355
6356 case IEMMODE_64BIT:
6357 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6358 IEM_MC_LOCAL(uint64_t, u64Value);
6359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6363 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6364 IEM_MC_ADVANCE_RIP_AND_FINISH();
6365 IEM_MC_END();
6366 break;
6367
6368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6369 }
6370 }
6371}
6372
6373
6374/**
6375 * opcode 0x63
6376 * @todo Table fixme
6377 */
6378FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6379{
6380 if (!IEM_IS_64BIT_CODE(pVCpu))
6381 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6382 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6383 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6384 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6385}
6386
6387
6388/**
6389 * @opcode 0x8c
6390 */
6391FNIEMOP_DEF(iemOp_mov_Ev_Sw)
6392{
6393 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
6394
6395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6396
6397 /*
6398 * Check that the destination register exists. The REX.R prefix is ignored.
6399 */
6400 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6401 if (iSegReg > X86_SREG_GS)
6402 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6403
6404 /*
6405 * If rm is denoting a register, no more instruction bytes.
6406 * In that case, the operand size is respected and the upper bits are
6407 * cleared (starting with some pentium).
6408 */
6409 if (IEM_IS_MODRM_REG_MODE(bRm))
6410 {
6411 switch (pVCpu->iem.s.enmEffOpSize)
6412 {
6413 case IEMMODE_16BIT:
6414 IEM_MC_BEGIN(0, 0);
6415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6416 IEM_MC_LOCAL(uint16_t, u16Value);
6417 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6418 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6419 IEM_MC_ADVANCE_RIP_AND_FINISH();
6420 IEM_MC_END();
6421 break;
6422
6423 case IEMMODE_32BIT:
6424 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426 IEM_MC_LOCAL(uint32_t, u32Value);
6427 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
6428 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6429 IEM_MC_ADVANCE_RIP_AND_FINISH();
6430 IEM_MC_END();
6431 break;
6432
6433 case IEMMODE_64BIT:
6434 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436 IEM_MC_LOCAL(uint64_t, u64Value);
6437 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
6438 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6439 IEM_MC_ADVANCE_RIP_AND_FINISH();
6440 IEM_MC_END();
6441 break;
6442
6443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6444 }
6445 }
6446 else
6447 {
6448 /*
6449 * We're saving the register to memory. The access is word sized
6450 * regardless of operand size prefixes.
6451 */
6452#if 0 /* not necessary */
6453 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6454#endif
6455 IEM_MC_BEGIN(0, 0);
6456 IEM_MC_LOCAL(uint16_t, u16Value);
6457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6460 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6461 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6462 IEM_MC_ADVANCE_RIP_AND_FINISH();
6463 IEM_MC_END();
6464 }
6465}
6466
6467
6468
6469
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in a general
 * purpose register without accessing memory. Register-form ModR/M is invalid.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to the 16-bit operand size */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to the 32-bit operand size */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc); /* no cast needed at full width */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6525
6526
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a GPR or a word in memory.
 * Loading CS this way is invalid; loading SS inhibits interrupts for one
 * instruction and may change the effective mode (hence the CImpl flags).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Emits the CImpl call that does the actual segment load; the bitmask
           tells the recompiler which guest segment state the call clobbers. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(0, a_fCImplFlags); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            /* Loading DS/ES in 32-bit code may switch the effective mode. */
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(0, a_fCImplFlags); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_LOCAL(RTGCPTR,    GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            /* Loading DS/ES in 32-bit code may switch the effective mode. */
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6642
6643
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a value off the stack into a GPR or memory operand.
 * Called from iemOp_Grp1A__xop with the already-fetched ModR/M byte when
 * modrm.reg == 0; other reg values decode as the XOP prefix there.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,           1);
            /* The 'cbImmAndRspOffset' high byte tells the EA calc to pretend
               RSP was already adjusted by the pop size (2/4/8 bytes). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else  /* Older, disabled interpreter-only variant kept for reference. */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u; /* only commit RSP after both the pop and the store succeeded */
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6775
6776
/**
 * @opcode 0x8f
 *
 * Group 1A / XOP prefix dispatcher: modrm.reg == 0 decodes as POP Ev; on AMD
 * CPUs with XOP support, reg values 1..7 turn the byte into the 3-byte XOP
 * prefix (similar layout to the 3-byte VEX prefix).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with operand-size, rep, lock or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Like VEX, the R/X/B and vvvv bits are stored inverted in the prefix. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* the mmmmm opcode map selector */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6839
6840
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Shared by opcodes 0x90..0x97; swaps the given GPR (after REX.B extension)
 * with rAX at the current effective operand size.
 *
 * @param   iReg    The low 3 bits of the register index; REX.B is OR'ed in here.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6891
6892
/**
 * @opcode 0x90
 *
 * NOP / PAUSE / XCHG r8,rAX: with REX.B this is a real exchange of R8 and
 * rAX; with an F3 (repz, decoded as "lock" prefix flag here) it is PAUSE,
 * which may trigger a VM-exit under nested VMX/SVM; otherwise a plain NOP.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6929
6930
/**
 * @opcode 0x91
 * xchg rCX,rAX (rCX becomes R9 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6939
6940
/**
 * @opcode 0x92
 * xchg rDX,rAX (rDX becomes R10 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6949
6950
/**
 * @opcode 0x93
 * xchg rBX,rAX (rBX becomes R11 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6959
6960
6961/**
6962 * @opcode 0x94
6963 */
6964FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6965{
6966 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6967 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6968}
6969
6970
/**
 * @opcode 0x95
 * xchg rBP,rAX (rBP becomes R13 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6979
6980
/**
 * @opcode 0x96
 * xchg rSI,rAX (rSI becomes R14 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6989
6990
/**
 * @opcode 0x97
 * xchg rDI,rAX (rDI becomes R15 with REX.B).
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6999
7000
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend AL->AX, AX->EAX or EAX->RAX depending on the
 * effective operand size. Implemented by testing the sign bit of the source
 * half and OR-ing/AND-ing the upper bits accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));  /* negative: set AH to 0xff */
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff)); /* non-negative: clear AH */
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7050
7051
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX: rDX becomes all ones if the
 * sign bit of rAX is set, zero otherwise, at the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7101
7102
/**
 * @opcode 0x9a
 *
 * CALL Ap (far call with immediate seg:off pointer). Invalid in 64-bit mode.
 * The heavy lifting (task switches, ring transitions) is deferred to
 * iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(  IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
7124
7125
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - check for pending x87 FPU exceptions (and device-not-available
 * conditions) before continuing; otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7137
7138
/**
 * @opcode 0x9c
 *
 * PUSHF Fv - push the flags register; deferred to iemCImpl_pushf, which
 * handles IOPL/VM86 details. May cause a VM-exit, and modifies rSP.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
7150
7151
/**
 * @opcode 0x9d
 *
 * POPF Fv - pop the flags register; deferred to iemCImpl_popf. Modifies
 * RFLAGS and rSP, and must re-check for pending IRQs since IF may change.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
7164
7165
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * SAHF - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF only; reserved
 * bit 1 is forced to one). In 64-bit mode the instruction requires the
 * LAHF/SAHF CPUID feature.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* GPR index 4 addresses AH when no REX prefix */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 flag bits unchanged */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* reserved bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7190
7191
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * LAHF - load AH from the low byte of EFLAGS. In 64-bit mode the instruction
 * requires the LAHF/SAHF CPUID feature.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* GPR index 4 addresses AH when no REX prefix */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7210
7211
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The moffs immediate width follows the effective ADDRESS size (16/32/64
 * bits); smaller widths are zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
7235
/**
 * @opcode 0xa0
 *
 * MOV AL,Ob - load AL from an absolute memory offset given as an immediate
 * (width follows the effective address size).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7260
7261
/**
 * @opcode 0xa1
 *
 * MOV rAX,Ov - load rAX from an absolute memory offset given as an immediate,
 * at the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7315
7316
/**
 * @opcode 0xa2
 *
 * MOV Ob,AL - store AL at an absolute memory offset given as an immediate.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7341
7342
/**
 * @opcode 0xa3
 *
 * MOV Ov,rAX - store rAX at an absolute memory offset given as an immediate,
 * at the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7396
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the body for a single (non-rep) MOVS iteration at the given value
 * width (8/16/32/64) and address width (16/32/64): load from DS(or prefix
 * override):rSI, store to ES:rDI, then advance or retreat both index
 * registers by the element size depending on EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7416
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * MOVSB Xb,Yb - copy a byte from DS(or prefix override):rSI to ES:rDI,
 * advancing/retreating per DF. With REP/REPNZ prefixes the whole string move
 * is deferred to the C implementation, which clobbers rSI, rDI and rCX.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7466
7467
7468/**
7469 * @opcode 0xa5
7470 * @opfltest df
7471 */
7472FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7473{
7474
7475 /*
7476 * Use the C implementation if a repeat prefix is encountered.
7477 */
7478 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7479 {
7480 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7482 switch (pVCpu->iem.s.enmEffOpSize)
7483 {
7484 case IEMMODE_16BIT:
7485 switch (pVCpu->iem.s.enmEffAddrMode)
7486 {
7487 case IEMMODE_16BIT:
7488 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7492 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7493 case IEMMODE_32BIT:
7494 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7495 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7497 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7498 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7499 case IEMMODE_64BIT:
7500 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7501 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7504 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7506 }
7507 break;
7508 case IEMMODE_32BIT:
7509 switch (pVCpu->iem.s.enmEffAddrMode)
7510 {
7511 case IEMMODE_16BIT:
7512 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7513 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7516 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7517 case IEMMODE_32BIT:
7518 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7519 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7520 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7521 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7522 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7523 case IEMMODE_64BIT:
7524 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7525 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7526 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7527 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7528 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7530 }
7531 case IEMMODE_64BIT:
7532 switch (pVCpu->iem.s.enmEffAddrMode)
7533 {
7534 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7535 case IEMMODE_32BIT:
7536 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7540 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7541 case IEMMODE_64BIT:
7542 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7543 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7545 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7546 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7548 }
7549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7550 }
7551 }
7552
7553 /*
7554 * Annoying double switch here.
7555 * Using ugly macro for implementing the cases, sharing it with movsb.
7556 */
7557 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7558 switch (pVCpu->iem.s.enmEffOpSize)
7559 {
7560 case IEMMODE_16BIT:
7561 switch (pVCpu->iem.s.enmEffAddrMode)
7562 {
7563 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7564 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7565 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7567 }
7568 break;
7569
7570 case IEMMODE_32BIT:
7571 switch (pVCpu->iem.s.enmEffAddrMode)
7572 {
7573 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7574 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7575 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7577 }
7578 break;
7579
7580 case IEMMODE_64BIT:
7581 switch (pVCpu->iem.s.enmEffAddrMode)
7582 {
7583 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7584 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7585 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7587 }
7588 break;
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591}
7592
7593#undef IEM_MOVS_CASE
7594
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the microcode for one non-repeated CMPS variant:  fetches the first
 * operand from iEffSeg:[e/r]SI and the second from ES:[e/r]DI, invokes the
 * CMP assembly helper (takes EFLAGS by value, returns the updated value),
 * commits the resulting flags and then advances or retreats both index
 * registers by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 2); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 1); \
    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puValue1, uValue2); \
    \
    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7625/**
7626 * @opcode 0xa6
7627 * @opflclass arithmetic
7628 * @opfltest df
7629 */
7630FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7631{
7632
7633 /*
7634 * Use the C implementation if a repeat prefix is encountered.
7635 */
7636 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7637 {
7638 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640 switch (pVCpu->iem.s.enmEffAddrMode)
7641 {
7642 case IEMMODE_16BIT:
7643 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7644 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7645 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7646 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7647 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7648 case IEMMODE_32BIT:
7649 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7650 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7651 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7652 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7653 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7654 case IEMMODE_64BIT:
7655 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7656 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7657 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7658 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7659 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7661 }
7662 }
7663 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7664 {
7665 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7667 switch (pVCpu->iem.s.enmEffAddrMode)
7668 {
7669 case IEMMODE_16BIT:
7670 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7671 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7673 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7674 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7675 case IEMMODE_32BIT:
7676 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7677 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7679 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7680 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7681 case IEMMODE_64BIT:
7682 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7683 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7685 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7686 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7688 }
7689 }
7690
7691 /*
7692 * Sharing case implementation with cmps[wdq] below.
7693 */
7694 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7695 switch (pVCpu->iem.s.enmEffAddrMode)
7696 {
7697 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7698 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7699 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7701 }
7702}
7703
7704
7705/**
7706 * @opcode 0xa7
7707 * @opflclass arithmetic
7708 * @opfltest df
7709 */
7710FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7711{
7712 /*
7713 * Use the C implementation if a repeat prefix is encountered.
7714 */
7715 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7716 {
7717 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7719 switch (pVCpu->iem.s.enmEffOpSize)
7720 {
7721 case IEMMODE_16BIT:
7722 switch (pVCpu->iem.s.enmEffAddrMode)
7723 {
7724 case IEMMODE_16BIT:
7725 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7726 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7727 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7728 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7729 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7730 case IEMMODE_32BIT:
7731 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7732 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7733 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7734 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7735 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7736 case IEMMODE_64BIT:
7737 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7738 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7739 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7740 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7741 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7743 }
7744 break;
7745 case IEMMODE_32BIT:
7746 switch (pVCpu->iem.s.enmEffAddrMode)
7747 {
7748 case IEMMODE_16BIT:
7749 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7750 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7751 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7752 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7753 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7754 case IEMMODE_32BIT:
7755 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7756 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7758 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7759 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7760 case IEMMODE_64BIT:
7761 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7762 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7763 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7765 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7767 }
7768 case IEMMODE_64BIT:
7769 switch (pVCpu->iem.s.enmEffAddrMode)
7770 {
7771 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7772 case IEMMODE_32BIT:
7773 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7774 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7775 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7776 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7777 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7778 case IEMMODE_64BIT:
7779 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7780 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7781 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7782 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7783 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7785 }
7786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7787 }
7788 }
7789
7790 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7791 {
7792 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7794 switch (pVCpu->iem.s.enmEffOpSize)
7795 {
7796 case IEMMODE_16BIT:
7797 switch (pVCpu->iem.s.enmEffAddrMode)
7798 {
7799 case IEMMODE_16BIT:
7800 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7801 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7803 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7804 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7805 case IEMMODE_32BIT:
7806 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7807 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7809 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7810 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7811 case IEMMODE_64BIT:
7812 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7813 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7815 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7816 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7818 }
7819 break;
7820 case IEMMODE_32BIT:
7821 switch (pVCpu->iem.s.enmEffAddrMode)
7822 {
7823 case IEMMODE_16BIT:
7824 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7825 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7826 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7828 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7829 case IEMMODE_32BIT:
7830 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7833 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7834 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7835 case IEMMODE_64BIT:
7836 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7837 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7838 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 case IEMMODE_64BIT:
7844 switch (pVCpu->iem.s.enmEffAddrMode)
7845 {
7846 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7847 case IEMMODE_32BIT:
7848 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7849 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7850 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7852 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7853 case IEMMODE_64BIT:
7854 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7855 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7857 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7858 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7860 }
7861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7862 }
7863 }
7864
7865 /*
7866 * Annoying double switch here.
7867 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7868 */
7869 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7870 switch (pVCpu->iem.s.enmEffOpSize)
7871 {
7872 case IEMMODE_16BIT:
7873 switch (pVCpu->iem.s.enmEffAddrMode)
7874 {
7875 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7876 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7877 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7879 }
7880 break;
7881
7882 case IEMMODE_32BIT:
7883 switch (pVCpu->iem.s.enmEffAddrMode)
7884 {
7885 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7886 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7887 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7889 }
7890 break;
7891
7892 case IEMMODE_64BIT:
7893 switch (pVCpu->iem.s.enmEffAddrMode)
7894 {
7895 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7896 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7897 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7899 }
7900 break;
7901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7902 }
7903}
7904
7905#undef IEM_CMPS_CASE
7906
7907/**
7908 * @opcode 0xa8
7909 * @opflclass logical
7910 */
7911FNIEMOP_DEF(iemOp_test_AL_Ib)
7912{
7913 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7914 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7915 IEMOP_BODY_BINARY_AL_Ib(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7916}
7917
7918
7919/**
7920 * @opcode 0xa9
7921 * @opflclass logical
7922 */
7923FNIEMOP_DEF(iemOp_test_eAX_Iz)
7924{
7925 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7927 IEMOP_BODY_BINARY_rAX_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7928}
7929
7930
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-repeated STOS variant: stores the low
 * ValBits of xAX to ES:[e/r]DI and then advances or retreats xDI by the
 * operand size depending on EFLAGS.DF.  EFLAGS are not otherwise touched.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7948/**
7949 * @opcode 0xaa
7950 */
7951FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7952{
7953 /*
7954 * Use the C implementation if a repeat prefix is encountered.
7955 */
7956 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7957 {
7958 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7960 switch (pVCpu->iem.s.enmEffAddrMode)
7961 {
7962 case IEMMODE_16BIT:
7963 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7964 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7965 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7966 iemCImpl_stos_al_m16);
7967 case IEMMODE_32BIT:
7968 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7969 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7970 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7971 iemCImpl_stos_al_m32);
7972 case IEMMODE_64BIT:
7973 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7974 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7975 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7976 iemCImpl_stos_al_m64);
7977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7978 }
7979 }
7980
7981 /*
7982 * Sharing case implementation with stos[wdq] below.
7983 */
7984 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7985 switch (pVCpu->iem.s.enmEffAddrMode)
7986 {
7987 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7988 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7989 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7991 }
7992}
7993
7994
7995/**
7996 * @opcode 0xab
7997 */
7998FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7999{
8000 /*
8001 * Use the C implementation if a repeat prefix is encountered.
8002 */
8003 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8004 {
8005 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
8006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8007 switch (pVCpu->iem.s.enmEffOpSize)
8008 {
8009 case IEMMODE_16BIT:
8010 switch (pVCpu->iem.s.enmEffAddrMode)
8011 {
8012 case IEMMODE_16BIT:
8013 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8014 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8015 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8016 iemCImpl_stos_ax_m16);
8017 case IEMMODE_32BIT:
8018 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8019 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8020 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8021 iemCImpl_stos_ax_m32);
8022 case IEMMODE_64BIT:
8023 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8024 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8025 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8026 iemCImpl_stos_ax_m64);
8027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8028 }
8029 break;
8030 case IEMMODE_32BIT:
8031 switch (pVCpu->iem.s.enmEffAddrMode)
8032 {
8033 case IEMMODE_16BIT:
8034 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8035 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8037 iemCImpl_stos_eax_m16);
8038 case IEMMODE_32BIT:
8039 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8040 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8042 iemCImpl_stos_eax_m32);
8043 case IEMMODE_64BIT:
8044 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8045 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8046 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8047 iemCImpl_stos_eax_m64);
8048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8049 }
8050 case IEMMODE_64BIT:
8051 switch (pVCpu->iem.s.enmEffAddrMode)
8052 {
8053 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
8054 case IEMMODE_32BIT:
8055 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8056 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8057 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8058 iemCImpl_stos_rax_m32);
8059 case IEMMODE_64BIT:
8060 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8061 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8062 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8063 iemCImpl_stos_rax_m64);
8064 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8065 }
8066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8067 }
8068 }
8069
8070 /*
8071 * Annoying double switch here.
8072 * Using ugly macro for implementing the cases, sharing it with stosb.
8073 */
8074 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
8075 switch (pVCpu->iem.s.enmEffOpSize)
8076 {
8077 case IEMMODE_16BIT:
8078 switch (pVCpu->iem.s.enmEffAddrMode)
8079 {
8080 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8081 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8082 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
8083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8084 }
8085 break;
8086
8087 case IEMMODE_32BIT:
8088 switch (pVCpu->iem.s.enmEffAddrMode)
8089 {
8090 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8091 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8092 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
8093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8094 }
8095 break;
8096
8097 case IEMMODE_64BIT:
8098 switch (pVCpu->iem.s.enmEffAddrMode)
8099 {
8100 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8101 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
8102 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
8103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8104 }
8105 break;
8106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8107 }
8108}
8109
8110#undef IEM_STOS_CASE
8111
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated LODS variant: loads ValBits from
 * iEffSeg:[e/r]SI into the low part of xAX and then advances or retreats
 * xSI by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

8129/**
8130 * @opcode 0xac
8131 * @opfltest df
8132 */
8133FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
8134{
8135 /*
8136 * Use the C implementation if a repeat prefix is encountered.
8137 */
8138 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8139 {
8140 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
8141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8142 switch (pVCpu->iem.s.enmEffAddrMode)
8143 {
8144 case IEMMODE_16BIT:
8145 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8146 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8147 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8148 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8149 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
8150 case IEMMODE_32BIT:
8151 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8152 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8154 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8155 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
8156 case IEMMODE_64BIT:
8157 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8158 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8160 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8161 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
8162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8163 }
8164 }
8165
8166 /*
8167 * Sharing case implementation with stos[wdq] below.
8168 */
8169 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
8170 switch (pVCpu->iem.s.enmEffAddrMode)
8171 {
8172 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8173 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8174 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
8175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8176 }
8177}
8178
8179
8180/**
8181 * @opcode 0xad
8182 * @opfltest df
8183 */
8184FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
8185{
8186 /*
8187 * Use the C implementation if a repeat prefix is encountered.
8188 */
8189 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8190 {
8191 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
8192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8193 switch (pVCpu->iem.s.enmEffOpSize)
8194 {
8195 case IEMMODE_16BIT:
8196 switch (pVCpu->iem.s.enmEffAddrMode)
8197 {
8198 case IEMMODE_16BIT:
8199 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8200 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8201 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8202 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8203 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
8204 case IEMMODE_32BIT:
8205 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8206 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8207 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8208 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8209 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
8210 case IEMMODE_64BIT:
8211 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8212 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8214 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8215 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
8216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8217 }
8218 break;
8219 case IEMMODE_32BIT:
8220 switch (pVCpu->iem.s.enmEffAddrMode)
8221 {
8222 case IEMMODE_16BIT:
8223 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8224 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8225 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8226 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8227 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
8228 case IEMMODE_32BIT:
8229 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8230 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8232 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8233 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
8234 case IEMMODE_64BIT:
8235 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8236 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8237 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8238 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8239 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
8240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8241 }
8242 case IEMMODE_64BIT:
8243 switch (pVCpu->iem.s.enmEffAddrMode)
8244 {
8245 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
8246 case IEMMODE_32BIT:
8247 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8248 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8249 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8250 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8251 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
8252 case IEMMODE_64BIT:
8253 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8254 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8255 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8256 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8257 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
8258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8259 }
8260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8261 }
8262 }
8263
8264 /*
8265 * Annoying double switch here.
8266 * Using ugly macro for implementing the cases, sharing it with lodsb.
8267 */
8268 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
8269 switch (pVCpu->iem.s.enmEffOpSize)
8270 {
8271 case IEMMODE_16BIT:
8272 switch (pVCpu->iem.s.enmEffAddrMode)
8273 {
8274 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8275 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8276 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
8277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8278 }
8279 break;
8280
8281 case IEMMODE_32BIT:
8282 switch (pVCpu->iem.s.enmEffAddrMode)
8283 {
8284 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8285 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8286 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
8287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8288 }
8289 break;
8290
8291 case IEMMODE_64BIT:
8292 switch (pVCpu->iem.s.enmEffAddrMode)
8293 {
8294 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8295 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
8296 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
8297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8298 }
8299 break;
8300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8301 }
8302}
8303
8304#undef IEM_LODS_CASE
8305
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the
 * non-repeating SCAS body for one value-size / address-size combination.
 *
 * Loads the memory operand from ES:xDI, compares it against xAX via the CMP
 * arithmetic helper (flags passed in by value, updated flags returned),
 * commits EFLAGS, and finally advances or rewinds xDI by the operand size
 * depending on EFLAGS.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 2); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 1); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_ARG_EFLAGS(    fEFlagsIn, 0); \
    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puRax, uValue); \
    \
    IEM_MC_COMMIT_EFLAGS(fEFlagsRet);\
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
8329
/**
 * @opcode 0xae
 * @opflclass arithmetic
 * @opfltest df
 *
 * SCASB - compare AL with the byte at ES:xDI and set EFLAGS accordingly,
 * advancing/rewinding xDI by one per EFLAGS.DF.  REPE/REPNE prefixed forms
 * are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        /* REPE/REPZ: repeat while xCX != 0 and ZF is set. */
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* REPNE/REPNZ: repeat while xCX != 0 and ZF is clear. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8401
8402
8403/**
8404 * @opcode 0xaf
8405 * @opflclass arithmetic
8406 * @opfltest df
8407 */
8408FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8409{
8410 /*
8411 * Use the C implementation if a repeat prefix is encountered.
8412 */
8413 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8414 {
8415 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8417 switch (pVCpu->iem.s.enmEffOpSize)
8418 {
8419 case IEMMODE_16BIT:
8420 switch (pVCpu->iem.s.enmEffAddrMode)
8421 {
8422 case IEMMODE_16BIT:
8423 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8424 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8425 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8426 iemCImpl_repe_scas_ax_m16);
8427 case IEMMODE_32BIT:
8428 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8429 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8430 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8431 iemCImpl_repe_scas_ax_m32);
8432 case IEMMODE_64BIT:
8433 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8434 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8435 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8436 iemCImpl_repe_scas_ax_m64);
8437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8438 }
8439 break;
8440 case IEMMODE_32BIT:
8441 switch (pVCpu->iem.s.enmEffAddrMode)
8442 {
8443 case IEMMODE_16BIT:
8444 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8445 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8446 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8447 iemCImpl_repe_scas_eax_m16);
8448 case IEMMODE_32BIT:
8449 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8450 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8451 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8452 iemCImpl_repe_scas_eax_m32);
8453 case IEMMODE_64BIT:
8454 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8455 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8456 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8457 iemCImpl_repe_scas_eax_m64);
8458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8459 }
8460 case IEMMODE_64BIT:
8461 switch (pVCpu->iem.s.enmEffAddrMode)
8462 {
8463 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
8464 case IEMMODE_32BIT:
8465 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8466 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8467 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8468 iemCImpl_repe_scas_rax_m32);
8469 case IEMMODE_64BIT:
8470 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8471 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8472 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8473 iemCImpl_repe_scas_rax_m64);
8474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8475 }
8476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8477 }
8478 }
8479 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8480 {
8481 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483 switch (pVCpu->iem.s.enmEffOpSize)
8484 {
8485 case IEMMODE_16BIT:
8486 switch (pVCpu->iem.s.enmEffAddrMode)
8487 {
8488 case IEMMODE_16BIT:
8489 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8490 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8492 iemCImpl_repne_scas_ax_m16);
8493 case IEMMODE_32BIT:
8494 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8495 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8497 iemCImpl_repne_scas_ax_m32);
8498 case IEMMODE_64BIT:
8499 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8502 iemCImpl_repne_scas_ax_m64);
8503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8504 }
8505 break;
8506 case IEMMODE_32BIT:
8507 switch (pVCpu->iem.s.enmEffAddrMode)
8508 {
8509 case IEMMODE_16BIT:
8510 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8511 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8512 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8513 iemCImpl_repne_scas_eax_m16);
8514 case IEMMODE_32BIT:
8515 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8516 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8517 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8518 iemCImpl_repne_scas_eax_m32);
8519 case IEMMODE_64BIT:
8520 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8521 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8522 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8523 iemCImpl_repne_scas_eax_m64);
8524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8525 }
8526 case IEMMODE_64BIT:
8527 switch (pVCpu->iem.s.enmEffAddrMode)
8528 {
8529 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8530 case IEMMODE_32BIT:
8531 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8532 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8533 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8534 iemCImpl_repne_scas_rax_m32);
8535 case IEMMODE_64BIT:
8536 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8539 iemCImpl_repne_scas_rax_m64);
8540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8541 }
8542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8543 }
8544 }
8545
8546 /*
8547 * Annoying double switch here.
8548 * Using ugly macro for implementing the cases, sharing it with scasb.
8549 */
8550 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8551 switch (pVCpu->iem.s.enmEffOpSize)
8552 {
8553 case IEMMODE_16BIT:
8554 switch (pVCpu->iem.s.enmEffAddrMode)
8555 {
8556 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8557 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8558 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8560 }
8561 break;
8562
8563 case IEMMODE_32BIT:
8564 switch (pVCpu->iem.s.enmEffAddrMode)
8565 {
8566 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8567 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8568 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8570 }
8571 break;
8572
8573 case IEMMODE_64BIT:
8574 switch (pVCpu->iem.s.enmEffAddrMode)
8575 {
8576 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8577 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8578 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8580 }
8581 break;
8582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8583 }
8584}
8585
8586#undef IEM_SCAS_CASE
8587
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given fixed 8-bit GPR.
 *
 * @param   iFixedReg   The general register index (including any REX.B
 *                      extension applied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8600
8601
/**
 * @opcode 0xb0
 * mov AL,Ib - or R8B,Ib when REX.B is set.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8610
8611
/**
 * @opcode 0xb1
 * mov CL,Ib - or R9B,Ib when REX.B is set.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8620
8621
/**
 * @opcode 0xb2
 * mov DL,Ib - or R10B,Ib when REX.B is set.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8630
8631
/**
 * @opcode 0xb3
 * mov BL,Ib - or R11B,Ib when REX.B is set.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8640
8641
/**
 * @opcode 0xb4
 * mov AH,Ib - 8-bit GPR index 4 is AH without a REX prefix; with REX it
 * addresses SPL (or R12B with REX.B), hence X86_GREG_xSP here.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8650
8651
/**
 * @opcode 0xb5
 * mov CH,Ib - 8-bit GPR index 5 is CH without a REX prefix; with REX it
 * addresses BPL (or R13B with REX.B), hence X86_GREG_xBP here.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8660
8661
/**
 * @opcode 0xb6
 * mov DH,Ib - 8-bit GPR index 6 is DH without a REX prefix; with REX it
 * addresses SIL (or R14B with REX.B), hence X86_GREG_xSI here.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8670
8671
/**
 * @opcode 0xb7
 * mov BH,Ib - 8-bit GPR index 7 is BH without a REX prefix; with REX it
 * addresses DIL (or R15B with REX.B), hence X86_GREG_xDI here.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8680
8681
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized by the effective operand size (imm16/imm32, or a
 * full imm64 for the 64-bit form) and stores it into the given fixed GPR.
 *
 * @param   iFixedReg   The general register index (including any REX.B
 *                      extension applied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8718
8719
/**
 * @opcode 0xb8
 * mov rAX,Iv - or r8 with REX.B.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8728
8729
/**
 * @opcode 0xb9
 * mov rCX,Iv - or r9 with REX.B.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8738
8739
/**
 * @opcode 0xba
 * mov rDX,Iv - or r10 with REX.B.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8748
8749
/**
 * @opcode 0xbb
 * mov rBX,Iv - or r11 with REX.B.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8758
8759
/**
 * @opcode 0xbc
 * mov rSP,Iv - or r12 with REX.B.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8768
8769
/**
 * @opcode 0xbd
 * mov rBP,Iv - or r13 with REX.B.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8778
8779
/**
 * @opcode 0xbe
 * mov rSI,Iv - or r14 with REX.B.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8788
8789
/**
 * @opcode 0xbf
 * mov rDI,Iv - or r15 with REX.B.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8798
8799
/**
 * @opcode 0xc0
 *
 * Group 2 with an imm8 shift/rotate count and a byte-sized destination:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,Ib (the /6 encoding is invalid).
 * The ModRM reg field selects the operation; the actual EFLAGS helper is
 * picked per target-CPU EFLAGS behaviour via IEMTARGETCPU_EFL_BEHAVIOR_SELECT.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.
       Note: the '1' passed to IEM_MC_CALC_RM_EFF_ADDR presumably accounts for
       the trailing imm8 byte still to be fetched - matches other Ib forms;
       TODO(review): confirm against the macro's definition. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
        \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    /* Dispatch on the ModRM reg field (the operation selector). */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8951
8952
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits the Ev,Ib group-2 body for one operation: register and memory forms,
   all three operand sizes.  Expects 'bRm' in scope; fetches the imm8 count
   itself (after effective-address calculation in the memory forms).

   NOTE(review): the 16-bit memory case starts with IEM_MC_BEGIN(0, 0) while
   the 16-bit register case uses IEM_MC_F_MIN_186 - confirm this asymmetry is
   intentional. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9080
/**
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 * ROL Ev,Ib - rotate left by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9091
9092
/**
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 * ROR Ev,Ib - rotate right by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9103
9104
/**
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 * RCL Ev,Ib - rotate left through CF by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9115
9116
/**
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 * RCR Ev,Ib - rotate right through CF by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9127
9128
/**
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 * SHL Ev,Ib - shift left by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9139
9140
/**
 * @opmaps grp2_c1
 * @opcode /5
 * @opflclass shift_count
 * SHR Ev,Ib - logical shift right by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9151
9152
/**
 * @opmaps grp2_c1
 * @opcode /7
 * @opflclass shift_count
 * SAR Ev,Ib - arithmetic shift right by an imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9163
9164#undef GRP2_BODY_Ev_Ib
9165
/**
 * @opcode 0xc1
 *
 * Group 2 Ev,Ib - dispatches on the ModRM reg field to the individual
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR workers; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
9187
9188
/**
 * @opcode 0xc2
 *
 * RET imm16 (near) - pops the return address and then releases imm16 bytes
 * of stack.  Deferred to the C implementation matching the effective operand
 * size; the default operand size is 64-bit in long mode (Intel ignores a
 * 66h prefix here).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9212
9213
/**
 * @opcode 0xc3
 *
 * RET (near) - pops the return address off the stack.  Deferred to the C
 * implementation matching the effective operand size; the default operand
 * size is 64-bit in long mode (Intel ignores a 66h prefix here).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9236
9237
/**
 * @opcode 0xc4
 *
 * LES Gv,Mp - or, in 64-bit mode / with ModRM.mod == 3, the three-byte VEX
 * prefix.  For the VEX form, bRm is VEX byte 1 (~R ~X ~B m-mmmm) and bVex2
 * is VEX byte 2 (W vvvv L pp), as the bit extractions below show.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
#if 1
            /* Branch-free translation of VEX.W (bit 7 of byte 2) into the prefix flag. */
            AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
#else
            if (bVex2 & 0x80 /* VEX.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
#endif
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                /* Branch-free translation of the inverted VEX.B/X/R bits (byte 1, bits 5-7). */
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
                pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
                pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
                pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
                pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            }
            else
            {
                pVCpu->iem.s.uRexReg    = 0;
                pVCpu->iem.s.uRexIndex  = 0;
                pVCpu->iem.s.uRexB      = 0;
                /** @todo testcase: Will attemps to access registers 8 thru 15 from 16&32 bit
                 * code raise \#UD or just be ignored? We're ignoring for now... */
                pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
            }
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1; /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;      /* VEX.pp: implied 66/F3/F2 prefix */

            /* VEX.m-mmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
9335
9336
9337/**
9338 * @opcode 0xc5
9339 */
9340FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9341{
9342 /* The LES instruction is invalid 64-bit mode. In legacy and
9343 compatability mode it is invalid with MOD=3.
9344 The use as a VEX prefix is made possible by assigning the inverted
9345 REX.R to the top MOD bit, and the top bit in the inverted register
9346 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9347 to accessing registers 0..7 in this VEX form. */
9348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9349 if ( IEM_IS_64BIT_CODE(pVCpu)
9350 || IEM_IS_MODRM_REG_MODE(bRm))
9351 {
9352 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9353 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9354 {
9355 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9356 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9357 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9358 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9359 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9360 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9361 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9362 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9363 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9364 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9365
9366#ifdef IEM_WITH_VEX
9367 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9368#else
9369 IEMOP_BITCH_ABOUT_STUB();
9370 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9371#endif
9372 }
9373
9374 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
9375 Log(("VEX2: VEX support disabled!\n"));
9376 IEMOP_RAISE_INVALID_OPCODE_RET();
9377 }
9378
9379 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9380 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9381}
9382
9383
9384/**
9385 * @opcode 0xc6
9386 */
9387FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9388{
9389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9390 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9391 IEMOP_RAISE_INVALID_OPCODE_RET();
9392 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9393
9394 if (IEM_IS_MODRM_REG_MODE(bRm))
9395 {
9396 /* register access */
9397 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9398 IEM_MC_BEGIN(0, 0);
9399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9400 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9401 IEM_MC_ADVANCE_RIP_AND_FINISH();
9402 IEM_MC_END();
9403 }
9404 else
9405 {
9406 /* memory access. */
9407 IEM_MC_BEGIN(0, 0);
9408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9410 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9412 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9413 IEM_MC_ADVANCE_RIP_AND_FINISH();
9414 IEM_MC_END();
9415 }
9416}
9417
9418
9419/**
9420 * @opcode 0xc7
9421 */
9422FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9423{
9424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9425 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
9426 IEMOP_RAISE_INVALID_OPCODE_RET();
9427 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9428
9429 if (IEM_IS_MODRM_REG_MODE(bRm))
9430 {
9431 /* register access */
9432 switch (pVCpu->iem.s.enmEffOpSize)
9433 {
9434 case IEMMODE_16BIT:
9435 IEM_MC_BEGIN(0, 0);
9436 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9438 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9439 IEM_MC_ADVANCE_RIP_AND_FINISH();
9440 IEM_MC_END();
9441 break;
9442
9443 case IEMMODE_32BIT:
9444 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9445 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9447 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9448 IEM_MC_ADVANCE_RIP_AND_FINISH();
9449 IEM_MC_END();
9450 break;
9451
9452 case IEMMODE_64BIT:
9453 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9454 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9456 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9457 IEM_MC_ADVANCE_RIP_AND_FINISH();
9458 IEM_MC_END();
9459 break;
9460
9461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9462 }
9463 }
9464 else
9465 {
9466 /* memory access. */
9467 switch (pVCpu->iem.s.enmEffOpSize)
9468 {
9469 case IEMMODE_16BIT:
9470 IEM_MC_BEGIN(0, 0);
9471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9473 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9475 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9476 IEM_MC_ADVANCE_RIP_AND_FINISH();
9477 IEM_MC_END();
9478 break;
9479
9480 case IEMMODE_32BIT:
9481 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9484 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9486 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9487 IEM_MC_ADVANCE_RIP_AND_FINISH();
9488 IEM_MC_END();
9489 break;
9490
9491 case IEMMODE_64BIT:
9492 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9495 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9497 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9498 IEM_MC_ADVANCE_RIP_AND_FINISH();
9499 IEM_MC_END();
9500 break;
9501
9502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9503 }
9504 }
9505}
9506
9507
9508
9509
9510/**
9511 * @opcode 0xc8
9512 */
9513FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9514{
9515 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9516 IEMOP_HLP_MIN_186();
9517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9518 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9519 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9521 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9523 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9524 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9525}
9526
9527
9528/**
9529 * @opcode 0xc9
9530 */
9531FNIEMOP_DEF(iemOp_leave)
9532{
9533 IEMOP_MNEMONIC(leave, "leave");
9534 IEMOP_HLP_MIN_186();
9535 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9537 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9538 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9540 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9541}
9542
9543
9544/**
9545 * @opcode 0xca
9546 */
9547FNIEMOP_DEF(iemOp_retf_Iw)
9548{
9549 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9550 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9552 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9553 | IEM_CIMPL_F_MODE,
9554 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9555 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9556 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9557 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9558 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9559 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9560 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9561 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9562 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9563 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9564 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9565 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9566 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9567 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9568 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9569 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9570 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9571 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9572}
9573
9574
9575/**
9576 * @opcode 0xcb
9577 */
9578FNIEMOP_DEF(iemOp_retf)
9579{
9580 IEMOP_MNEMONIC(retf, "retf");
9581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9582 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9583 | IEM_CIMPL_F_MODE,
9584 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9585 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9586 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9587 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9588 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9589 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9590 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9591 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9592 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9593 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9594 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9595 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9596 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9597 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9598 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9599 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9600 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9601 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9602}
9603
9604
9605/**
9606 * @opcode 0xcc
9607 */
9608FNIEMOP_DEF(iemOp_int3)
9609{
9610 IEMOP_MNEMONIC(int3, "int3");
9611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9612 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9613 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9614 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9615}
9616
9617
9618/**
9619 * @opcode 0xcd
9620 */
9621FNIEMOP_DEF(iemOp_int_Ib)
9622{
9623 IEMOP_MNEMONIC(int_Ib, "int Ib");
9624 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9626 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9627 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9628 iemCImpl_int, u8Int, IEMINT_INTN);
9629 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9630}
9631
9632
9633/**
9634 * @opcode 0xce
9635 */
9636FNIEMOP_DEF(iemOp_into)
9637{
9638 IEMOP_MNEMONIC(into, "into");
9639 IEMOP_HLP_NO_64BIT();
9640 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9641 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9642 UINT64_MAX,
9643 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9644 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9645}
9646
9647
9648/**
9649 * @opcode 0xcf
9650 */
9651FNIEMOP_DEF(iemOp_iret)
9652{
9653 IEMOP_MNEMONIC(iret, "iret");
9654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9655 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9656 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9657 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9658 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9659 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9660 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9661 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9662 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9663 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9664 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9665 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9666 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9667 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9668 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9669 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9670 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9671 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9672 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9673 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9674 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9675 /* Segment registers are sanitized when returning to an outer ring, or fully
9676 reloaded when returning to v86 mode. Thus the large flush list above. */
9677}
9678
9679
9680/**
9681 * @opcode 0xd0
9682 */
9683FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9684{
9685 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9686
9687 /* Need to use a body macro here since the EFLAGS behaviour differs between
9688 the shifts, rotates and rotate w/ carry. Sigh. */
9689#define GRP2_BODY_Eb_1(a_pImplExpr) \
9690 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9691 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9692 { \
9693 /* register */ \
9694 IEM_MC_BEGIN(0, 0); \
9695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9696 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9697 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9698 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9699 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9700 IEM_MC_REF_EFLAGS(pEFlags); \
9701 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9702 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9703 IEM_MC_END(); \
9704 } \
9705 else \
9706 { \
9707 /* memory */ \
9708 IEM_MC_BEGIN(0, 0); \
9709 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9710 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9712 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9713 \
9714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9716 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9717 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9718 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9719 \
9720 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9721 IEM_MC_COMMIT_EFLAGS(EFlags); \
9722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9723 IEM_MC_END(); \
9724 } (void)0
9725
9726 switch (IEM_GET_MODRM_REG_8(bRm))
9727 {
9728 /**
9729 * @opdone
9730 * @opmaps grp2_d0
9731 * @opcode /0
9732 * @opflclass rotate_1
9733 */
9734 case 0:
9735 {
9736 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9737 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9738 break;
9739 }
9740 /**
9741 * @opdone
9742 * @opmaps grp2_d0
9743 * @opcode /1
9744 * @opflclass rotate_1
9745 */
9746 case 1:
9747 {
9748 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9749 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9750 break;
9751 }
9752 /**
9753 * @opdone
9754 * @opmaps grp2_d0
9755 * @opcode /2
9756 * @opflclass rotate_carry_1
9757 */
9758 case 2:
9759 {
9760 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9761 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9762 break;
9763 }
9764 /**
9765 * @opdone
9766 * @opmaps grp2_d0
9767 * @opcode /3
9768 * @opflclass rotate_carry_1
9769 */
9770 case 3:
9771 {
9772 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9773 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9774 break;
9775 }
9776 /**
9777 * @opdone
9778 * @opmaps grp2_d0
9779 * @opcode /4
9780 * @opflclass shift_1
9781 */
9782 case 4:
9783 {
9784 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9785 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9786 break;
9787 }
9788 /**
9789 * @opdone
9790 * @opmaps grp2_d0
9791 * @opcode /5
9792 * @opflclass shift_1
9793 */
9794 case 5:
9795 {
9796 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9797 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9798 break;
9799 }
9800 /**
9801 * @opdone
9802 * @opmaps grp2_d0
9803 * @opcode /7
9804 * @opflclass shift_1
9805 */
9806 case 7:
9807 {
9808 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9809 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9810 break;
9811 }
9812 /** @opdone */
9813 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9814 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9815 }
9816#undef GRP2_BODY_Eb_1
9817}
9818
9819
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry.  Sigh.
   Shared body for the 0xd1 group-2 handlers (Ev,1): a_pImplExpr selects the
   per-instruction IEMOPSHIFTSIZES table; register and memory forms are both
   emitted for the three effective operand sizes.  The 32-bit register form
   clears the high dword (IEM_MC_CLEAR_HIGH_GREG_U64) per x86 rules.
   Expanded inside FNIEMOP_DEF_1 bodies where bRm and pVCpu are in scope. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9937
9938/**
9939 * @opmaps grp2_d1
9940 * @opcode /0
9941 * @opflclass rotate_1
9942 */
9943FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9944{
9945 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9946 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9947}
9948
9949
9950/**
9951 * @opmaps grp2_d1
9952 * @opcode /1
9953 * @opflclass rotate_1
9954 */
9955FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9956{
9957 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9958 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9959}
9960
9961
9962/**
9963 * @opmaps grp2_d1
9964 * @opcode /2
9965 * @opflclass rotate_carry_1
9966 */
9967FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9968{
9969 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9970 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9971}
9972
9973
9974/**
9975 * @opmaps grp2_d1
9976 * @opcode /3
9977 * @opflclass rotate_carry_1
9978 */
9979FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9980{
9981 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9982 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9983}
9984
9985
9986/**
9987 * @opmaps grp2_d1
9988 * @opcode /4
9989 * @opflclass shift_1
9990 */
9991FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9992{
9993 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9994 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9995}
9996
9997
9998/**
9999 * @opmaps grp2_d1
10000 * @opcode /5
10001 * @opflclass shift_1
10002 */
10003FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
10004{
10005 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
10006 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10007}
10008
10009
10010/**
10011 * @opmaps grp2_d1
10012 * @opcode /7
10013 * @opflclass shift_1
10014 */
10015FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
10016{
10017 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
10018 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10019}
10020
10021#undef GRP2_BODY_Ev_1
10022
10023/**
10024 * @opcode 0xd1
10025 */
10026FNIEMOP_DEF(iemOp_Grp2_Ev_1)
10027{
10028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10029 switch (IEM_GET_MODRM_REG_8(bRm))
10030 {
10031 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
10032 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
10033 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
10034 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
10035 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
10036 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
10037 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
10038 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10039 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10040 }
10041}
10042
10043
10044/**
10045 * @opcode 0xd2
10046 */
10047FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
10048{
10049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10050
10051 /* Need to use a body macro here since the EFLAGS behaviour differs between
10052 the shifts, rotates and rotate w/ carry. Sigh. */
10053#define GRP2_BODY_Eb_CL(a_pImplExpr) \
10054 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10055 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10056 { \
10057 /* register */ \
10058 IEM_MC_BEGIN(0, 0); \
10059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10060 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10061 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10062 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10063 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10064 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10065 IEM_MC_REF_EFLAGS(pEFlags); \
10066 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
10067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10068 IEM_MC_END(); \
10069 } \
10070 else \
10071 { \
10072 /* memory */ \
10073 IEM_MC_BEGIN(0, 0); \
10074 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10075 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10077 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10078 \
10079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10081 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10082 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10083 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10084 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
10085 \
10086 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10087 IEM_MC_COMMIT_EFLAGS(EFlags); \
10088 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10089 IEM_MC_END(); \
10090 } (void)0
10091
10092 switch (IEM_GET_MODRM_REG_8(bRm))
10093 {
10094 /**
10095 * @opdone
10096 * @opmaps grp2_d0
10097 * @opcode /0
10098 * @opflclass rotate_count
10099 */
10100 case 0:
10101 {
10102 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10103 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
10104 break;
10105 }
10106 /**
10107 * @opdone
10108 * @opmaps grp2_d0
10109 * @opcode /1
10110 * @opflclass rotate_count
10111 */
10112 case 1:
10113 {
10114 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10115 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
10116 break;
10117 }
10118 /**
10119 * @opdone
10120 * @opmaps grp2_d0
10121 * @opcode /2
10122 * @opflclass rotate_carry_count
10123 */
10124 case 2:
10125 {
10126 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10127 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
10128 break;
10129 }
10130 /**
10131 * @opdone
10132 * @opmaps grp2_d0
10133 * @opcode /3
10134 * @opflclass rotate_carry_count
10135 */
10136 case 3:
10137 {
10138 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10139 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
10140 break;
10141 }
10142 /**
10143 * @opdone
10144 * @opmaps grp2_d0
10145 * @opcode /4
10146 * @opflclass shift_count
10147 */
10148 case 4:
10149 {
10150 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10151 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
10152 break;
10153 }
10154 /**
10155 * @opdone
10156 * @opmaps grp2_d0
10157 * @opcode /5
10158 * @opflclass shift_count
10159 */
10160 case 5:
10161 {
10162 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10163 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10164 break;
10165 }
10166 /**
10167 * @opdone
10168 * @opmaps grp2_d0
10169 * @opcode /7
10170 * @opflclass shift_count
10171 */
10172 case 7:
10173 {
10174 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10175 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10176 break;
10177 }
10178 /** @opdone */
10179 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10180 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10181 }
10182#undef GRP2_BODY_Eb_CL
10183}
10184
10185
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Grp2 Ev,CL worker body (opcode 0xd3), expanded into each /reg handler:
     a_Ins             - Lower case instruction name; used to form the
                         iemNativeEmit_<a_Ins>_r_CL_efl native emitter name.
     a_pImplExpr       - Expression yielding the PCIEMOPSHIFTSIZES table of
                         assembly helpers (selected per EFLAGS behaviour).
     a_fRegNativeArchs - RT_ARCH_VAL_XXX mask of hosts with a native emitter
                         for the register form (0 = always call the helper).
     a_fMemNativeArchs - Ditto for the memory form; not referenced by the
                         body yet (reserved, pass 0). */
#define GRP2_BODY_Ev_CL(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(fEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u16Dst, cShiftArg, fEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
                } IEM_MC_NATIVE_ELSE() { \
                    /* Fallback: call the assembly helper with dst + eflags by reference. */ \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(fEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u32Dst, cShiftArg, fEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                    /* 32-bit GPR writes zero bits 63:32; the helper only wrote via the ref. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(fEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u64Dst, cShiftArg, fEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory - always goes thru the assembly helpers (a_fMemNativeArchs unused). */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
10342
10343
10344/**
10345 * @opmaps grp2_d0
10346 * @opcode /0
10347 * @opflclass rotate_count
10348 */
10349FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
10350{
10351 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10352 GRP2_BODY_Ev_CL(rol, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags), 0, 0);
10353}
10354
10355
10356/**
10357 * @opmaps grp2_d0
10358 * @opcode /1
10359 * @opflclass rotate_count
10360 */
10361FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
10362{
10363 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10364 GRP2_BODY_Ev_CL(ror, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags), 0, 0);
10365}
10366
10367
10368/**
10369 * @opmaps grp2_d0
10370 * @opcode /2
10371 * @opflclass rotate_carry_count
10372 */
10373FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
10374{
10375 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10376 GRP2_BODY_Ev_CL(rcl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags), 0, 0);
10377}
10378
10379
10380/**
10381 * @opmaps grp2_d0
10382 * @opcode /3
10383 * @opflclass rotate_carry_count
10384 */
10385FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
10386{
10387 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10388 GRP2_BODY_Ev_CL(rcr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags), 0, 0);
10389}
10390
10391
10392/**
10393 * @opmaps grp2_d0
10394 * @opcode /4
10395 * @opflclass shift_count
10396 */
10397FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
10398{
10399 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10400 GRP2_BODY_Ev_CL(shl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags), RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
10401}
10402
10403
10404/**
10405 * @opmaps grp2_d0
10406 * @opcode /5
10407 * @opflclass shift_count
10408 */
10409FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
10410{
10411 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10412 GRP2_BODY_Ev_CL(shr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags), 0, 0);
10413}
10414
10415
10416/**
10417 * @opmaps grp2_d0
10418 * @opcode /7
10419 * @opflclass shift_count
10420 */
10421FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
10422{
10423 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10424 GRP2_BODY_Ev_CL(sar, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags), 0, 0);
10425}
10426
10427#undef GRP2_BODY_Ev_CL
10428
10429/**
10430 * @opcode 0xd3
10431 */
10432FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10433{
10434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10435 switch (IEM_GET_MODRM_REG_8(bRm))
10436 {
10437 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10438 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10439 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10440 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10441 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10442 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10443 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10444 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10445 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10446 }
10447}
10448
10449
10450/**
10451 * @opcode 0xd4
10452 * @opflmodify cf,pf,af,zf,sf,of
10453 * @opflundef cf,af,of
10454 */
10455FNIEMOP_DEF(iemOp_aam_Ib)
10456{
10457/** @todo testcase: aam */
10458 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10459 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10461 IEMOP_HLP_NO_64BIT();
10462 if (!bImm)
10463 IEMOP_RAISE_DIVIDE_ERROR_RET();
10464 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10465}
10466
10467
10468/**
10469 * @opcode 0xd5
10470 * @opflmodify cf,pf,af,zf,sf,of
10471 * @opflundef cf,af,of
10472 */
10473FNIEMOP_DEF(iemOp_aad_Ib)
10474{
10475/** @todo testcase: aad? */
10476 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10477 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10479 IEMOP_HLP_NO_64BIT();
10480 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10481}
10482
10483
10484/**
10485 * @opcode 0xd6
10486 */
10487FNIEMOP_DEF(iemOp_salc)
10488{
10489 IEMOP_MNEMONIC(salc, "salc");
10490 IEMOP_HLP_NO_64BIT();
10491
10492 IEM_MC_BEGIN(0, 0);
10493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10494 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10495 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10496 } IEM_MC_ELSE() {
10497 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10498 } IEM_MC_ENDIF();
10499 IEM_MC_ADVANCE_RIP_AND_FINISH();
10500 IEM_MC_END();
10501}
10502
10503
10504/**
10505 * @opcode 0xd7
10506 */
10507FNIEMOP_DEF(iemOp_xlat)
10508{
10509 IEMOP_MNEMONIC(xlat, "xlat");
10510 switch (pVCpu->iem.s.enmEffAddrMode)
10511 {
10512 case IEMMODE_16BIT:
10513 IEM_MC_BEGIN(0, 0);
10514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10515 IEM_MC_LOCAL(uint8_t, u8Tmp);
10516 IEM_MC_LOCAL(uint16_t, u16Addr);
10517 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
10518 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
10519 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10520 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10521 IEM_MC_ADVANCE_RIP_AND_FINISH();
10522 IEM_MC_END();
10523 break;
10524
10525 case IEMMODE_32BIT:
10526 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10528 IEM_MC_LOCAL(uint8_t, u8Tmp);
10529 IEM_MC_LOCAL(uint32_t, u32Addr);
10530 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
10531 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
10532 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10533 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10534 IEM_MC_ADVANCE_RIP_AND_FINISH();
10535 IEM_MC_END();
10536 break;
10537
10538 case IEMMODE_64BIT:
10539 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10541 IEM_MC_LOCAL(uint8_t, u8Tmp);
10542 IEM_MC_LOCAL(uint64_t, u64Addr);
10543 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10544 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10545 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10546 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10547 IEM_MC_ADVANCE_RIP_AND_FINISH();
10548 IEM_MC_END();
10549 break;
10550
10551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10552 }
10553}
10554
10555
10556/**
10557 * Common worker for FPU instructions working on ST0 and STn, and storing the
10558 * result in ST0.
10559 *
10560 * @param bRm Mod R/M byte.
10561 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10562 */
10563FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10564{
10565 IEM_MC_BEGIN(0, 0);
10566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10567 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10568 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10569 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10570 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10571
10572 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10573 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10574 IEM_MC_PREPARE_FPU_USAGE();
10575 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10576 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10577 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10578 } IEM_MC_ELSE() {
10579 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10580 } IEM_MC_ENDIF();
10581 IEM_MC_ADVANCE_RIP_AND_FINISH();
10582
10583 IEM_MC_END();
10584}
10585
10586
10587/**
10588 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10589 * flags.
10590 *
10591 * @param bRm Mod R/M byte.
10592 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10593 */
10594FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10595{
10596 IEM_MC_BEGIN(0, 0);
10597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10598 IEM_MC_LOCAL(uint16_t, u16Fsw);
10599 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10600 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10601 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10602
10603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10605 IEM_MC_PREPARE_FPU_USAGE();
10606 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10607 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10608 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10609 } IEM_MC_ELSE() {
10610 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10611 } IEM_MC_ENDIF();
10612 IEM_MC_ADVANCE_RIP_AND_FINISH();
10613
10614 IEM_MC_END();
10615}
10616
10617
10618/**
10619 * Common worker for FPU instructions working on ST0 and STn, only affecting
10620 * flags, and popping when done.
10621 *
10622 * @param bRm Mod R/M byte.
10623 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10624 */
10625FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10626{
10627 IEM_MC_BEGIN(0, 0);
10628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10629 IEM_MC_LOCAL(uint16_t, u16Fsw);
10630 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10631 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10632 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10633
10634 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10635 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10636 IEM_MC_PREPARE_FPU_USAGE();
10637 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10638 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10639 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10640 } IEM_MC_ELSE() {
10641 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10642 } IEM_MC_ENDIF();
10643 IEM_MC_ADVANCE_RIP_AND_FINISH();
10644
10645 IEM_MC_END();
10646}
10647
10648
/** Opcode 0xd8 11/0. fadd st0,stN: ST0 := ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. fmul st0,stN: ST0 := ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. fcom st0,stN: compare, FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. fcomp st0,stN: same as fcom but pops the stack (shares
 *  the fcom assembly helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. fsub st0,stN: ST0 := ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. fsubr st0,stN: ST0 := STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. fdiv st0,stN: ST0 := ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. fdivr st0,stN: ST0 := STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10711
10712
10713/**
10714 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10715 * the result in ST0.
10716 *
10717 * @param bRm Mod R/M byte.
10718 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10719 */
10720FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10721{
10722 IEM_MC_BEGIN(0, 0);
10723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10724 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10725 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10726 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10727 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10728 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10729
10730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10732
10733 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10734 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10735 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10736
10737 IEM_MC_PREPARE_FPU_USAGE();
10738 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10739 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10740 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10741 } IEM_MC_ELSE() {
10742 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10743 } IEM_MC_ENDIF();
10744 IEM_MC_ADVANCE_RIP_AND_FINISH();
10745
10746 IEM_MC_END();
10747}
10748
10749
/** Opcode 0xd8 !11/0. fadd st0,m32r: ST0 := ST0 + m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. fmul st0,m32r: ST0 := ST0 * m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10764
10765
/** Opcode 0xd8 !11/2. fcom st0,m32r: compare ST0 with m32real, FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* FSW update records the memory operand (seg:off) for FPU DS/DP state. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10797
10798
/** Opcode 0xd8 !11/3. fcomp st0,m32r: like fcom m32r, but pops the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The stack is popped on both the compare and the underflow path. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10830
10831
/** Opcode 0xd8 !11/4. fsub st0,m32r: ST0 := ST0 - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. fsubr st0,m32r: ST0 := m32real - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. fdiv st0,m32r: ST0 := ST0 / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. fdivr st0,m32r: ST0 := m32real / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10862
10863
10864/**
10865 * @opcode 0xd8
10866 */
10867FNIEMOP_DEF(iemOp_EscF0)
10868{
10869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10870 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10871
10872 if (IEM_IS_MODRM_REG_MODE(bRm))
10873 {
10874 switch (IEM_GET_MODRM_REG_8(bRm))
10875 {
10876 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10877 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10878 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10879 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10880 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10881 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10882 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10883 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10885 }
10886 }
10887 else
10888 {
10889 switch (IEM_GET_MODRM_REG_8(bRm))
10890 {
10891 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10892 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10893 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10894 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10895 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10896 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10897 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10898 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10900 }
10901 }
10902}
10903
10904
/** Opcode 0xd9 /0 mem32real
 * fld m32r: push m32real (converted to r80) onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the new TOP-1) to be free, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10935
10936
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: store ST0 to memory as m32real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,        1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW (unmasked exceptions roll back). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if IM is masked, store the indefinite QNaN; else leave
           memory untouched and raise the underflow via FSW. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10974
10975
/** Opcode 0xd9 !11/3
 * fstp m32r: store ST0 to memory as m32real, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,        1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* Same as fst m32r, but the FSW update also pops the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11013
11014
11015/** Opcode 0xd9 !11/4 */
11016FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
11017{
11018 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
11019 IEM_MC_BEGIN(0, 0);
11020 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
11021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11022
11023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11025 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11026
11027 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
11028 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
11029 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
11030 iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
11031 IEM_MC_END();
11032}
11033
11034
11035/** Opcode 0xd9 !11/5 */
11036FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11037{
11038 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
11039 IEM_MC_BEGIN(0, 0);
11040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11042
11043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11044 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11045 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11046
11047 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11048 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11049
11050 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
11051 iemCImpl_fldcw, u16Fsw);
11052 IEM_MC_END();
11053}
11054
11055
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte: store the FPU environment to memory without checking
 * for pending exceptions (no wait prefix). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    /* The CIMPL may touch FCW and FSW, so both shadow copies are flushed. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11074
11075
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: store the current FPU control word to memory; simple enough
 * to do inline (no CIMPL). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
11092
11093
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * fnop: FPU no-operation, but still updates FOP/FPUIP state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
11109
11110
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of the selected register onto
 *  the FPU stack; pushes an underflow indefinite if the source is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Reference the source register (ModRM.rm) before the push changes TOP. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11135
11136
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST0 with the selected register.
 *  The underflow case (either register empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i)'s old value (pr80Value2) goes to ST0 via the result,
           ST0's old value (pr80Value1) is stored into ST(i). C1 is cleared
           by passing it in the result FSW (X86_FSW_C1 mask). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11165
11166
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i): store ST0 into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no data movement needed, just update FSW
           and pop (or signal underflow if ST0 is empty). */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 to ST(i) and pop the stack. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11213
11214
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM / pending \#MF as appropriate; signals stack underflow (writing
 * an indefinite result, masking permitting) when ST0 is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11242
11243
/** Opcode 0xd9 0xe0 - FCHS: flip the sign of ST0 (common unary worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11250
11251
/** Opcode 0xd9 0xe1 - FABS: ST0 = absolute value of ST0 (common unary worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11258
11259
/** Opcode 0xd9 0xe4 - FTST: compare ST0 against +0.0, setting only C0-C3 in
 *  FSW (no register is written). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to stuff an indefinite into. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11283
11284
/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST0 into C0-C3.
 *  Unlike the other ST0 workers there is no not-empty check here: FXAM is
 *  defined for empty registers too (it reports the empty class), so the
 *  register is referenced unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11305
11306
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Checks that the register one below TOP (ST7 after the push) is free before
 * calling the loader; otherwise raises/records a stack overflow.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11332
11333
/** Opcode 0xd9 0xe8 - FLD1: push +1.0 (common constant-push worker). */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11340
11341
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10) (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11348
11349
/** Opcode 0xd9 0xea - FLDL2E: push log2(e) (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11356
/** Opcode 0xd9 0xeb - FLDPI: push pi (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11363
11364
/** Opcode 0xd9 0xec - FLDLG2: push log10(2) (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11371
/** Opcode 0xd9 0xed - FLDLN2: push ln(2) (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11378
11379
/** Opcode 0xd9 0xee - FLDZ: push +0.0 (common constant-push worker). */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11386
11387
/** Opcode 0xd9 0xf0 - F2XM1: ST0 = 2^ST0 - 1 (common unary worker).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11401
11402
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * The first operand (value1) is ST(i) selected by ModRM.rm, the second is ST0;
 * both must be non-empty or a stack underflow is recorded against ST(i).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11433
11434
/** Opcode 0xd9 0xf1 - FYL2X: ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
11441
11442
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Underflow (ST0 empty) pushes the two-result underflow indefinites instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11470
11471
/** Opcode 0xd9 0xf2 - FPTAN: replace ST0 with tan(ST0) and push +1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11478
11479
/** Opcode 0xd9 0xf3 - FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11486
11487
/** Opcode 0xd9 0xf4 - FXTRACT: split ST0 into exponent (ST1) and significand
 *  (pushed as new ST0). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11494
11495
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST0 / ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11502
11503
/** Opcode 0xd9 0xf6 - FDECSTP: rotate the stack by decrementing TOP; no tag
 *  bits or register contents change. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11524
11525
/** Opcode 0xd9 0xf7 - FINCSTP: rotate the stack by incrementing TOP; no tag
 *  bits or register contents change. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11546
11547
/** Opcode 0xd9 0xf8 - FPREM: (truncating) partial remainder of ST0 / ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11554
11555
/** Opcode 0xd9 0xf9 - FYL2XP1: ST1 = ST1 * log2(ST0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11562
11563
/** Opcode 0xd9 0xfa - FSQRT: ST0 = square root of ST0 (common unary worker). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11570
11571
/** Opcode 0xd9 0xfb - FSINCOS: replace ST0 with sin(ST0) and push cos(ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11578
11579
/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to integer per FCW.RC (common unary worker). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11586
11587
/** Opcode 0xd9 0xfd - FSCALE: ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11594
11595
/** Opcode 0xd9 0xfe - FSIN: ST0 = sin(ST0) (common unary worker). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11602
11603
/** Opcode 0xd9 0xff - FCOS: ST0 = cos(ST0) (common unary worker). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11610
11611
/** Used by iemOp_EscF1.
 *  Dispatch table for the register forms 0xe0 thru 0xff of the 0xd9 escape
 *  opcode; indexed by (modrm byte - 0xe0). Entry order is load-bearing. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
11648
11649
/**
 * @opcode 0xd9
 *
 * First-level decoder for the 0xd9 x87 escape: dispatches on the ModRM byte to
 * either register-form handlers (reg mode) or memory-form handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode (FOP) for later FPUIP/FOP bookkeeping. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg values 4-7 imply bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11694
11695
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST0 if EFLAGS.CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid, even if the condition is false. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11720
11721
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST0 if EFLAGS.ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid, even if the condition is false. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11746
11747
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid, even if the condition is false. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11772
11773
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST0 if EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid, even if the condition is false. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11798
11799
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The assembly implementation writes only FSW (C0-C3 etc.); no register data
 * is stored. Underflow is signalled when either ST0 or ST1 is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11829
11830
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11837
11838
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer operand is fetched from memory before the
 * register-empty check, matching the real instruction's fault ordering.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11874
11875
/** Opcode 0xda !11/0 - FIADD: ST0 = ST0 + m32int (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11882
11883
/** Opcode 0xda !11/1 - FIMUL: ST0 = ST0 * m32int (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11890
11891
/** Opcode 0xda !11/2 - FICOM: compare ST0 with m32int; only FSW (C0-C3) is
 *  updated, no register is written. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register for the underflow indefinite. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11923
11924
/** Opcode 0xda !11/3 - FICOMP: compare ST0 with m32int, then pop; only FSW
 *  (C0-C3) is updated, no register is written. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register for the underflow indefinite. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11956
11957
/** Opcode 0xda !11/4 - FISUB: ST0 = ST0 - m32int (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11964
11965
/** Opcode 0xda !11/5 - FISUBR: ST0 = m32int - ST0 (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11972
11973
/** Opcode 0xda !11/6 - FIDIV: ST0 = ST0 / m32int (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11980
11981
/** Opcode 0xda !11/7 - FIDIVR: ST0 = m32int / ST0 (common ST0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11988
11989
/**
 * @opcode 0xda
 *
 * First-level decoder for the 0xda x87 escape: register forms are the FCMOVcc
 * family plus FUCOMPP; memory forms are the m32int arithmetic/compare group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode (FOP) for later FPUIP/FOP bookkeeping. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12031
12032
/** Opcode 0xdb !11/0 - FILD: convert m32int to long double and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the register below TOP) must be free, or the push overflows. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12063
12064
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to m32int with truncation, then pop.
 *  The destination is mapped for writing up front; on underflow with IM masked
 *  the integer-indefinite value is stored, otherwise the mapping is rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit only if the conversion didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12102
12103
/** Opcode 0xdb !11/2.
 * FIST m32int: store ST(0) to memory as a 32-bit signed integer using the
 * rounding mode from FCW.RC; the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the integer indefinite value,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12141
12142
/** Opcode 0xdb !11/3.
 * FISTP m32int: store ST(0) to memory as a 32-bit signed integer using the
 * rounding mode from FCW.RC, then pop the stack.  Same as FIST m32int except
 * for the pop (uses the same iemAImpl_fist_r80_to_i32 worker). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the integer indefinite value,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12180
12181
/** Opcode 0xdb !11/5.
 * FLD m80real: load an 80-bit real from memory and push it onto the FPU
 * stack (passed through iemAImpl_fld_r80_from_r80 so FSW/C1 get set). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free, otherwise the push overflows the register stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12212
12213
/** Opcode 0xdb !11/7.
 * FSTP m80real: store ST(0) to memory as an 80-bit real, then pop the
 * stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the real indefinite QNaN,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12251
12252
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST(0) if EFLAGS.CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; underflow is signalled otherwise. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12277
12278
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST(0) if EFLAGS.ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; underflow is signalled otherwise. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12303
12304
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST(0) if both EFLAGS.CF and EFLAGS.ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; underflow is signalled otherwise. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12329
12330
/** Opcode 0xdb 11/3.
 * FCMOVNU: copy ST(i) to ST(0) if EFLAGS.PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; underflow is signalled otherwise. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12355
12356
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op (ignored) on later CPUs,
 * so only the CR0.TS/EM check and RIP advance are performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12367
12368
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op (ignored) on later
 * CPUs, so only the CR0.TS/EM check and RIP advance are performed. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12379
12380
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for pending
 * exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12393
12394
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions
 * (fCheckXcpts=false); deferred to the iemCImpl_finit C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
12403
12404
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode" instruction; ignored (no-op) on later
 * CPUs, so only the CR0.TS/EM check and RIP advance are performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12415
12416
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode" instruction; raises \#UD here since
 * that is the behavior of newer CPUs (the ignore variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
12432
12433
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare of ST(0) with ST(i), setting EFLAGS; no pop.
 * Deferred to iemCImpl_fcomi_fucomi with fUCmp=true; the FPU opcode is OR'ed
 * into the low bits of the last argument. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12442
12443
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare of ST(0) with ST(i), setting EFLAGS; no pop.
 * Deferred to iemCImpl_fcomi_fucomi with fUCmp=false; the FPU opcode is
 * OR'ed into the low bits of the last argument. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12452
12453
/**
 * @opcode 0xdb
 *
 * Decoder for the 0xdb FPU escape byte: reads the ModR/M byte, records the
 * FPU opcode word, and dispatches on reg/mem form and the /reg field.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form (mod == 3). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN,  bRm);
            case 4:
                /* /4 encodes individual control instructions per full ModR/M byte. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form (mod != 3). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12505
12506
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Used by the 0xdc register-form arithmetic instructions (FADD/FMUL/FSUB/
 * FSUBR/FDIV/FDIVR stN,st0).  Raises stack underflow if either register is
 * empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST(0); the result goes back to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12537
12538
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12545
12546
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12553
12554
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12561
12562
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12569
12570
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12577
12578
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): thin wrapper around iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12585
12586
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Used by the 0xdc memory-form arithmetic instructions (FADD/FMUL/FSUB/FSUBR/
 * FDIV/FDIVR m64real).  Raises stack underflow if ST(0) is empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12621
12622
/** Opcode 0xdc !11/0.
 * FADD m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12629
12630
/** Opcode 0xdc !11/1.
 * FMUL m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12637
12638
/** Opcode 0xdc !11/2.
 * FCOM m64real: compare ST(0) with a 64-bit real from memory, setting the
 * FSW condition codes; no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12670
12671
/** Opcode 0xdc !11/3.
 * FCOMP m64real: compare ST(0) with a 64-bit real from memory, setting the
 * FSW condition codes, then pop the stack (same worker as FCOM m64real). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12703
12704
/** Opcode 0xdc !11/4.
 * FSUB m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12711
12712
/** Opcode 0xdc !11/5.
 * FSUBR m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12719
12720
/** Opcode 0xdc !11/6.
 * FDIV m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12727
12728
/** Opcode 0xdc !11/7.
 * FDIVR m64real: thin wrapper around iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12735
12736
/**
 * @opcode 0xdc
 *
 * Decoder for the 0xdc FPU escape byte: reads the ModR/M byte, records the
 * FPU opcode word, and dispatches on reg/mem form and the /reg field.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form (mod == 3). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form (mod != 3). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12775
12776
/** Opcode 0xdd !11/0.
 * FLD m64real: load a 64-bit real from memory, convert it to 80-bit real and
 * push it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free, otherwise the push overflows the register stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12807
12808
/** Opcode 0xdd !11/1.
 * (Comment previously said !11/0 — copy/paste; FISTTP m64int is DD /1 per
 * the Intel SDM.)
 * FISTTP m64int: store ST(0) to memory as a 64-bit signed integer using
 * truncation (chop) rounding regardless of FCW.RC, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the integer indefinite value,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12846
12847
/** Opcode 0xdd !11/2.
 * (Comment previously said !11/0 — copy/paste; FST m64real is DD /2 per the
 * Intel SDM.)
 * FST m64real: store ST(0) to memory as a 64-bit real; no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the real indefinite QNaN,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12885
12886
12887
12888
/** Opcode 0xdd !11/3.
 * (Comment previously said !11/0 — copy/paste; FSTP m64real is DD /3 per
 * the Intel SDM.)
 * FSTP m64real: store ST(0) to memory as a 64-bit real, then pop the stack
 * (same worker as FST m64real). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked write the real indefinite QNaN,
           otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12926
12927
/** Opcode 0xdd !11/4.
 * (Comment previously said !11/0 — copy/paste; FRSTOR is DD /4 per the
 * Intel SDM.)
 * FRSTOR m94/108byte: restore the complete FPU state from memory; deferred
 * to the iemCImpl_frstor C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Operand size selects the 94 vs. 108 byte (16- vs. 32-bit) image layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12946
12947
/** Opcode 0xdd !11/6.
 * (Comment previously said !11/0 — copy/paste; FNSAVE is DD /6 per the
 * Intel SDM.)
 * FNSAVE m94/108byte: save the complete FPU state to memory; deferred to
 * the iemCImpl_fnsave C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    /* Operand size selects the 94 vs. 108 byte (16- vs. 32-bit) image layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12966
12967/** Opcode 0xdd !11/7 (mem form, reg field = 7).
    * FNSTSW m16 - store the FPU status word to a 16-bit memory operand without
    * checking for pending FPU exceptions first. */
12968FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
12969{
12970 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
12971
12972 IEM_MC_BEGIN(0, 0);
12973 IEM_MC_LOCAL(uint16_t, u16Tmp);
12974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12975
12976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12978 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12979
    /* Only reads the FSW - no IEM_MC_MAYBE_RAISE_FPU_XCPT, as FNSTSW must work
       even with unmasked pending exceptions. */
12980 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
12981 IEM_MC_FETCH_FSW(u16Tmp);
12982 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
12983 IEM_MC_ADVANCE_RIP_AND_FINISH();
12984
12985/** @todo Debug / drop a hint to the verifier that things may differ
12986 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
12987 * NT4SP1. (X86_FSW_PE) */
12988 IEM_MC_END();
12989}
12990
12991
12992/** Opcode 0xdd 11/0.
    * FFREE ST(i) - mark the given stack register as empty (tag it free) without
    * touching the stack top pointer. */
12993FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
12994{
12995 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
12996 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
12997 unmodified. */
12998 IEM_MC_BEGIN(0, 0);
12999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13000
13001 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13002 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13003
13004 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13005 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
13006 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
13007
13008 IEM_MC_ADVANCE_RIP_AND_FINISH();
13009 IEM_MC_END();
13010}
13011
13012
13013/** Opcode 0xdd 11/2 (reg field = 2, see iemOp_EscF5).
    * FST ST(i) - copy ST(0) into ST(i) without popping. */
13014FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
13015{
13016 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
13017 IEM_MC_BEGIN(0, 0);
13018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13019 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
13020 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13021 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13022 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13023
13024 IEM_MC_PREPARE_FPU_USAGE();
13025 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
    /* ST(0) is valid: store its value (FSW cleared) into ST(i). */
13026 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
13027 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
13028 } IEM_MC_ELSE() {
    /* ST(0) empty: signal stack underflow on the destination register. */
13029 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
13030 } IEM_MC_ENDIF();
13031
13032 IEM_MC_ADVANCE_RIP_AND_FINISH();
13033 IEM_MC_END();
13034}
13035
13036
13037/** Opcode 0xdd 11/4 (reg field = 4, see iemOp_EscF5).
    * FUCOM ST(i) - unordered compare of ST(0) with ST(i), no pop. */
13038FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13039{
13040 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
13041 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13042}
13043
13044
13045/** Opcode 0xdd 11/5 (reg field = 5, see iemOp_EscF5).
    * FUCOMP ST(i) - unordered compare of ST(0) with ST(i), then pop. */
13046FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13047{
13048 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
13049 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13050}
13051
13052
13053/**
13054 * @opcode 0xdd
13055 */
FNIEMOP_DEF(iemOp_EscF7_dummy_comment_guard) /* (comment only: see real definition below) */
13091
13092
13093/** Opcode 0xde 11/0.
    * FADDP ST(i),ST(0) - add, store in ST(i), then pop. */
13094FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
13095{
13096 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
13097 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
13098}
13099
13100
13101/** Opcode 0xde 11/1 (reg field = 1, see iemOp_EscF6).
    * FMULP ST(i),ST(0) - multiply, store in ST(i), then pop. */
13102FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
13103{
13104 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
13105 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
13106}
13107
13108
13109/** Opcode 0xde 0xd9.
    * FCOMPP - compare ST(0) with ST(1), then pop both. */
13110FNIEMOP_DEF(iemOp_fcompp)
13111{
13112 IEMOP_MNEMONIC(fcompp, "fcompp");
13113 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
13114}
13115
13116
13117/** Opcode 0xde 11/4.
    * FSUBRP ST(i),ST(0) - reversed subtract, store in ST(i), then pop. */
13118FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
13119{
13120 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
13121 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
13122}
13123
13124
13125/** Opcode 0xde 11/5.
    * FSUBP ST(i),ST(0) - subtract, store in ST(i), then pop. */
13126FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
13127{
13128 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
13129 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
13130}
13131
13132
13133/** Opcode 0xde 11/6.
    * FDIVRP ST(i),ST(0) - reversed divide, store in ST(i), then pop. */
13134FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
13135{
13136 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
13137 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
13138}
13139
13140
13141/** Opcode 0xde 11/7.
    * FDIVP ST(i),ST(0) - divide, store in ST(i), then pop. */
13142FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
13143{
13144 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
13145 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
13146}
13147
13148
13149/**
13150 * Common worker for FPU instructions working on ST0 and an m16i, and storing
13151 * the result in ST0.
13152 *
13153 * @param bRm Mod R/M byte.
13154 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13155 */
13156FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
13157{
13158 IEM_MC_BEGIN(0, 0);
13159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13160 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13161 IEM_MC_LOCAL(int16_t, i16Val2);
13162 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13163 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13164 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13165
13166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13168
13169 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13170 IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The 16-bit integer operand is fetched up front, before checking ST(0). */
13171 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13172
13173 IEM_MC_PREPARE_FPU_USAGE();
13174 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13175 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
13176 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
13177 } IEM_MC_ELSE() {
    /* Empty ST(0): record a stack underflow against register 0. */
13178 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
13179 } IEM_MC_ENDIF();
13180 IEM_MC_ADVANCE_RIP_AND_FINISH();
13181
13182 IEM_MC_END();
13183}
13184
13185
13186/** Opcode 0xde !11/0.
    * FIADD m16int - ST(0) += (signed 16-bit integer memory operand). */
13187FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
13188{
13189 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
13190 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
13191}
13192
13193
13194/** Opcode 0xde !11/1.
    * FIMUL m16int - ST(0) *= (signed 16-bit integer memory operand). */
13195FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
13196{
13197 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
13198 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
13199}
13200
13201
13202/** Opcode 0xde !11/2.
    * FICOM m16int - compare ST(0) with a signed 16-bit integer memory operand;
    * only the FSW condition codes are updated, nothing is stored or popped. */
13203FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
13204{
13205 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
13206
13207 IEM_MC_BEGIN(0, 0);
13208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13209 IEM_MC_LOCAL(uint16_t, u16Fsw);
13210 IEM_MC_LOCAL(int16_t, i16Val2);
13211 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13212 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13213 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13214
13215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13217
13218 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13219 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13220 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13221
13222 IEM_MC_PREPARE_FPU_USAGE();
13223 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
    /* Compare and merge the resulting FSW (condition codes); no pop. */
13224 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
13225 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13226 } IEM_MC_ELSE() {
13227 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13228 } IEM_MC_ENDIF();
13229 IEM_MC_ADVANCE_RIP_AND_FINISH();
13230
13231 IEM_MC_END();
13232}
13233
13234
13235/** Opcode 0xde !11/3.
    * FICOMP m16int - like FICOM m16int (see above) but pops ST(0) afterwards. */
13236FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
13237{
13238 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
13239
13240 IEM_MC_BEGIN(0, 0);
13241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13242 IEM_MC_LOCAL(uint16_t, u16Fsw);
13243 IEM_MC_LOCAL(int16_t, i16Val2);
13244 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13245 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13246 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13247
13248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13250
13251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13253 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13254
13255 IEM_MC_PREPARE_FPU_USAGE();
13256 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
    /* Same comparison worker as FICOM, but the FSW update pops the stack. */
13257 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
13258 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13259 } IEM_MC_ELSE() {
13260 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13261 } IEM_MC_ENDIF();
13262 IEM_MC_ADVANCE_RIP_AND_FINISH();
13263
13264 IEM_MC_END();
13265}
13266
13267
13268/** Opcode 0xde !11/4.
    * FISUB m16int - ST(0) -= (signed 16-bit integer memory operand). */
13269FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
13270{
13271 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
13272 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
13273}
13274
13275
13276/** Opcode 0xde !11/5.
    * FISUBR m16int - ST(0) = (signed 16-bit integer memory operand) - ST(0). */
13277FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
13278{
13279 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
13280 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
13281}
13282
13283
13284/** Opcode 0xde !11/6.
    * FIDIV m16int - ST(0) /= (signed 16-bit integer memory operand). */
13285FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
13286{
13287 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
13288 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
13289}
13290
13291
13292/** Opcode 0xde !11/7.
    * FIDIVR m16int - ST(0) = (signed 16-bit integer memory operand) / ST(0). */
13293FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
13294{
13295 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
13296 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
13297}
13298
13299
13300/**
13301 * @opcode 0xde
13302 */
/* Escape opcode 0xde decoder: register forms are the popping two-operand
   arithmetic instructions (FADDP etc.), memory forms operate on m16int. */
13303FNIEMOP_DEF(iemOp_EscF6)
13304{
13305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the 11-bit FPU opcode (low 3 bits of 0xde + modrm) for FSW/FOP updates. */
13306 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
13307 if (IEM_IS_MODRM_REG_MODE(bRm))
13308 {
13309 switch (IEM_GET_MODRM_REG_8(bRm))
13310 {
13311 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
13312 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
13313 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
    /* Only DE D9 is FCOMPP; the other /3 encodings are invalid. */
13314 case 3: if (bRm == 0xd9)
13315 return FNIEMOP_CALL(iemOp_fcompp);
13316 IEMOP_RAISE_INVALID_OPCODE_RET();
13317 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
13318 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
13319 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
13320 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
13321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13322 }
13323 }
13324 else
13325 {
13326 switch (IEM_GET_MODRM_REG_8(bRm))
13327 {
13328 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
13329 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
13330 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
13331 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
13332 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
13333 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
13334 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
13335 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
13336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13337 }
13338 }
13339}
13340
13341
13342/** Opcode 0xdf 11/0.
13343 * Undocument instruction, assumed to work like ffree + fincstp. */
13344FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
13345{
13346 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
13347 IEM_MC_BEGIN(0, 0);
13348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13349
13350 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13351 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13352
13353 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free the register, then pop by incrementing the stack top (FINCSTP). */
13354 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
13355 IEM_MC_FPU_STACK_INC_TOP();
13356 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
13357
13358 IEM_MC_ADVANCE_RIP_AND_FINISH();
13359 IEM_MC_END();
13360}
13361
13362
13363/** Opcode 0xdf 0xe0.
    * FNSTSW AX - store the FPU status word in AX, no pending-exception check. */
13364FNIEMOP_DEF(iemOp_fnstsw_ax)
13365{
13366 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
13367 IEM_MC_BEGIN(0, 0);
13368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13369 IEM_MC_LOCAL(uint16_t, u16Tmp);
13370 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13371 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13372 IEM_MC_FETCH_FSW(u16Tmp);
13373 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
13374 IEM_MC_ADVANCE_RIP_AND_FINISH();
13375 IEM_MC_END();
13376}
13377
13378
13379/** Opcode 0xdf 11/5. */
13380FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13381{
13382 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
13383 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13384 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13385 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13386}
13387
13388
13389/** Opcode 0xdf 11/6.
    * FCOMIP ST(0),ST(i) - ordered compare into EFLAGS (fUCmp=false), then pop;
    * bit 31 of the last argument requests the pop, low bits are the FPU opcode. */
13390FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
13391{
13392 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
13393 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13394 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13395 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13396}
13397
13398
13399/** Opcode 0xdf !11/0.
    * FILD m16int - convert a signed 16-bit integer to R80 and push it. */
13400FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
13401{
13402 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
13403
13404 IEM_MC_BEGIN(0, 0);
13405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13406 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13407 IEM_MC_LOCAL(int16_t, i16Val);
13408 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13409 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
13410
13411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13413
13414 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13415 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13416 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13417
13418 IEM_MC_PREPARE_FPU_USAGE();
    /* A push goes into ST(7)-to-be, so that register must currently be empty. */
13419 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13420 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
13421 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13422 } IEM_MC_ELSE() {
13423 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13424 } IEM_MC_ENDIF();
13425 IEM_MC_ADVANCE_RIP_AND_FINISH();
13426
13427 IEM_MC_END();
13428}
13429
13430
13431/** Opcode 0xdf !11/1.
    * FISTTP m16int (SSE3) - store ST(0) as a 16-bit integer using truncation,
    * then pop.  On masked invalid operation, stores the integer indefinite. */
13432FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
13433{
13434 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
13435 IEM_MC_BEGIN(0, 0);
13436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13438
13439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13440 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13441 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13442 IEM_MC_PREPARE_FPU_USAGE();
13443
    /* Map the destination write-only up front so both branches can commit/rollback. */
13444 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13445 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13446 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13447
13448 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13449 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13450 IEM_MC_LOCAL(uint16_t, u16Fsw);
13451 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13452 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13453 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13454 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13455 } IEM_MC_ELSE() {
    /* Empty ST(0): with IM masked store the integer indefinite, else roll back. */
13456 IEM_MC_IF_FCW_IM() {
13457 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13458 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13459 } IEM_MC_ELSE() {
13460 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13461 } IEM_MC_ENDIF();
13462 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13463 } IEM_MC_ENDIF();
13464 IEM_MC_ADVANCE_RIP_AND_FINISH();
13465
13466 IEM_MC_END();
13467}
13468
13469
13470/** Opcode 0xdf !11/2.
    * FIST m16int - store ST(0) as a 16-bit integer (current rounding mode),
    * without popping.  On masked invalid operation, stores the integer indefinite. */
13471FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
13472{
13473 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
13474 IEM_MC_BEGIN(0, 0);
13475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13477
13478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13479 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13480 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13481 IEM_MC_PREPARE_FPU_USAGE();
13482
13483 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13484 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13485 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13486
13487 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13488 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13489 IEM_MC_LOCAL(uint16_t, u16Fsw);
13490 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13491 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13492 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
    /* No pop - this is what distinguishes FIST from FISTP below. */
13493 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13494 } IEM_MC_ELSE() {
13495 IEM_MC_IF_FCW_IM() {
13496 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13497 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13498 } IEM_MC_ELSE() {
13499 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13500 } IEM_MC_ENDIF();
13501 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13502 } IEM_MC_ENDIF();
13503 IEM_MC_ADVANCE_RIP_AND_FINISH();
13504
13505 IEM_MC_END();
13506}
13507
13508
13509/** Opcode 0xdf !11/3.
    * FISTP m16int - like FIST m16int above but pops ST(0) afterwards. */
13510FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
13511{
13512 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
13513 IEM_MC_BEGIN(0, 0);
13514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13516
13517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13518 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13519 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13520 IEM_MC_PREPARE_FPU_USAGE();
13521
13522 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13523 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13524 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13525
13526 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13527 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13528 IEM_MC_LOCAL(uint16_t, u16Fsw);
13529 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13530 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13531 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13532 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13533 } IEM_MC_ELSE() {
13534 IEM_MC_IF_FCW_IM() {
13535 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13536 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13537 } IEM_MC_ELSE() {
13538 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13539 } IEM_MC_ENDIF();
13540 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13541 } IEM_MC_ENDIF();
13542 IEM_MC_ADVANCE_RIP_AND_FINISH();
13543
13544 IEM_MC_END();
13545}
13546
13547
13548/** Opcode 0xdf !11/4.
    * FBLD m80bcd - load an 80-bit packed BCD value, convert to R80 and push. */
13549FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
13550{
13551 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
13552
13553 IEM_MC_BEGIN(0, 0);
13554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13555 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13556 IEM_MC_LOCAL(RTPBCD80U, d80Val);
13557 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13558 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
13559
13560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13562
13563 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13564 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13565 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13566
13567 IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires the ST(7)-to-be register to be empty. */
13568 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13569 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
13570 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13571 } IEM_MC_ELSE() {
13572 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13573 } IEM_MC_ENDIF();
13574 IEM_MC_ADVANCE_RIP_AND_FINISH();
13575
13576 IEM_MC_END();
13577}
13578
13579
13580/** Opcode 0xdf !11/5.
    * FILD m64int - convert a signed 64-bit integer to R80 and push it. */
13581FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
13582{
13583 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
13584
13585 IEM_MC_BEGIN(0, 0);
13586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13587 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13588 IEM_MC_LOCAL(int64_t, i64Val);
13589 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13590 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
13591
13592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13594
13595 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13597 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13598
13599 IEM_MC_PREPARE_FPU_USAGE();
13600 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13601 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
13602 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13603 } IEM_MC_ELSE() {
13604 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13605 } IEM_MC_ENDIF();
13606 IEM_MC_ADVANCE_RIP_AND_FINISH();
13607
13608 IEM_MC_END();
13609}
13610
13611
13612/** Opcode 0xdf !11/6.
    * FBSTP m80bcd - store ST(0) as an 80-bit packed BCD value and pop.
    * On masked invalid operation the BCD indefinite value is stored. */
13613FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
13614{
13615 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
13616 IEM_MC_BEGIN(0, 0);
13617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13619
13620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13621 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13622 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13623 IEM_MC_PREPARE_FPU_USAGE();
13624
13625 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13626 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
13627 IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13628
13629 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13630 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13631 IEM_MC_LOCAL(uint16_t, u16Fsw);
13632 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13633 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
13634 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13635 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13636 } IEM_MC_ELSE() {
    /* Empty ST(0): store BCD indefinite if IM is masked, otherwise roll back. */
13637 IEM_MC_IF_FCW_IM() {
13638 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
13639 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13640 } IEM_MC_ELSE() {
13641 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13642 } IEM_MC_ENDIF();
13643 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13644 } IEM_MC_ENDIF();
13645 IEM_MC_ADVANCE_RIP_AND_FINISH();
13646
13647 IEM_MC_END();
13648}
13649
13650
13651/** Opcode 0xdf !11/7.
    * FISTP m64int - store ST(0) as a signed 64-bit integer and pop.
    * On masked invalid operation, stores the 64-bit integer indefinite. */
13652FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
13653{
13654 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
13655 IEM_MC_BEGIN(0, 0);
13656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13658
13659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13660 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13661 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13662 IEM_MC_PREPARE_FPU_USAGE();
13663
13664 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13665 IEM_MC_ARG(int64_t *, pi64Dst, 1);
13666 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13667
13668 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13669 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13670 IEM_MC_LOCAL(uint16_t, u16Fsw);
13671 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13672 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
13673 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13674 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13675 } IEM_MC_ELSE() {
13676 IEM_MC_IF_FCW_IM() {
13677 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
13678 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13679 } IEM_MC_ELSE() {
13680 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13681 } IEM_MC_ENDIF();
13682 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13683 } IEM_MC_ENDIF();
13684 IEM_MC_ADVANCE_RIP_AND_FINISH();
13685
13686 IEM_MC_END();
13687}
13688
13689
13690/**
13691 * @opcode 0xdf
13692 */
/* Escape opcode 0xdf decoder: register forms (FFREEP, FNSTSW AX, FUCOMIP,
   FCOMIP plus reserved aliases), memory forms (m16int / m64int / m80bcd). */
13693FNIEMOP_DEF(iemOp_EscF7)
13694{
13695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the 11-bit FPU opcode (low 3 bits of 0xdf + modrm) for FSW/FOP updates. */
13696 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13697 if (IEM_IS_MODRM_REG_MODE(bRm))
13698 {
13699 switch (IEM_GET_MODRM_REG_8(bRm))
13700 {
13701 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13702 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13703 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13704 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
    /* Only DF E0 is FNSTSW AX; other /4 register encodings are invalid. */
13705 case 4: if (bRm == 0xe0)
13706 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13707 IEMOP_RAISE_INVALID_OPCODE_RET();
13708 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13709 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13710 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13712 }
13713 }
13714 else
13715 {
13716 switch (IEM_GET_MODRM_REG_8(bRm))
13717 {
13718 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13719 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13720 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13721 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13722 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13723 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13724 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13725 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13727 }
13728 }
13729}
13730
13731
13732/**
13733 * @opcode 0xe0
13734 * @opfltest zf
13735 */
/* LOOPNE/LOOPNZ Jb: decrement xCX (width = effective address size) and jump if
   the new count is non-zero AND ZF is clear.  Each IEM_MC branch first tests
   "CX != 1 && !ZF" (i.e. the post-decrement value will be non-zero), then
   decrements and either jumps or falls through. */
13736FNIEMOP_DEF(iemOp_loopne_Jb)
13737{
13738 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13739 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13740 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13741
13742 switch (pVCpu->iem.s.enmEffAddrMode)
13743 {
13744 case IEMMODE_16BIT:
13745 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13747 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13748 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13749 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13750 } IEM_MC_ELSE() {
13751 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13752 IEM_MC_ADVANCE_RIP_AND_FINISH();
13753 } IEM_MC_ENDIF();
13754 IEM_MC_END();
13755 break;
13756
13757 case IEMMODE_32BIT:
13758 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13760 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13761 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13762 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13763 } IEM_MC_ELSE() {
13764 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13765 IEM_MC_ADVANCE_RIP_AND_FINISH();
13766 } IEM_MC_ENDIF();
13767 IEM_MC_END();
13768 break;
13769
13770 case IEMMODE_64BIT:
13771 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13773 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13774 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13775 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13776 } IEM_MC_ELSE() {
13777 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13778 IEM_MC_ADVANCE_RIP_AND_FINISH();
13779 } IEM_MC_ENDIF();
13780 IEM_MC_END();
13781 break;
13782
13783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13784 }
13785}
13786
13787
13788/**
13789 * @opcode 0xe1
13790 * @opfltest zf
13791 */
/* LOOPE/LOOPZ Jb: decrement xCX (width = effective address size) and jump if
   the new count is non-zero AND ZF is set.  Mirror image of LOOPNE above. */
13792FNIEMOP_DEF(iemOp_loope_Jb)
13793{
13794 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13795 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13796 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13797
13798 switch (pVCpu->iem.s.enmEffAddrMode)
13799 {
13800 case IEMMODE_16BIT:
13801 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13803 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13804 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13805 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13806 } IEM_MC_ELSE() {
13807 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13808 IEM_MC_ADVANCE_RIP_AND_FINISH();
13809 } IEM_MC_ENDIF();
13810 IEM_MC_END();
13811 break;
13812
13813 case IEMMODE_32BIT:
13814 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13816 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13817 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13818 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13819 } IEM_MC_ELSE() {
13820 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13821 IEM_MC_ADVANCE_RIP_AND_FINISH();
13822 } IEM_MC_ENDIF();
13823 IEM_MC_END();
13824 break;
13825
13826 case IEMMODE_64BIT:
13827 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13829 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13830 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13831 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13832 } IEM_MC_ELSE() {
13833 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13834 IEM_MC_ADVANCE_RIP_AND_FINISH();
13835 } IEM_MC_ENDIF();
13836 IEM_MC_END();
13837 break;
13838
13839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13840 }
13841}
13842
13843
13844/**
13845 * @opcode 0xe2
13846 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Skip the whole busy-wait at once by zeroing the counter. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Normal LOOP: decrement the address-size counter register and branch while
       it has not reached zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                /* The counter is exactly 1 here, so decrementing == storing zero. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13937
13938
13939/**
13940 * @opcode 0xe3
13941 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* JCXZ/JECXZ/JRCXZ: branch when the address-size counter register is zero;
       unlike LOOP the counter is not modified.  The conditions below are
       written inverted (not-zero -> fall through, else -> branch). */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13986
13987
13988/**
13989 * @opcode 0xe4
13990 * @opfltest iopl
13991 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation: 1-byte read from the immediate port;
       xAX is listed as the (only) guest register written. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
14000
14001
14002/**
14003 * @opcode 0xe5
14004 * @opfltest iopl
14005 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 bytes for 16-bit,
       otherwise 4 (the 64-bit operand size also reads 4 bytes here). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
14015
14016
14017/**
14018 * @opcode 0xe6
14019 * @opfltest iopl
14020 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation: 1-byte write to the immediate port;
       no guest registers are modified (mask 0). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
14029
14030
14031/**
14032 * @opcode 0xe7
14033 * @opfltest iopl
14034 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 bytes for 16-bit,
       otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
14044
14045
14046/**
14047 * @opcode 0xe8
14048 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* 16-bit relative displacement. */
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            /* 32-bit relative displacement. */
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* The immediate stays 32 bits wide in 64-bit mode and is
               sign-extended to 64 bits while decoding. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14079
14080
14081/**
14082 * @opcode 0xe9
14083 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        /* The 64-bit case shares the 32-bit decoding: the displacement is
           still a signed 32-bit immediate in 64-bit mode. */
        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14110
14111
14112/**
14113 * @opcode 0xea
14114 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT(); /* direct far jumps are not encodable in 64-bit mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
14133
14134
14135/**
14136 * @opcode 0xeb
14137 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    /* Short (8-bit displacement) unconditional jump; same code for all modes. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
14149
14150
14151/**
14152 * @opcode 0xec
14153 * @opfltest iopl
14154 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 1-byte read from the port number held in DX; xAX is listed as written. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
14163
14164
14165/**
14166 * @opcode 0xed
14167 * @opfltest iopl
14168 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 bytes for 16-bit,
       otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
14178
14179
14180/**
14181 * @opcode 0xee
14182 * @opfltest iopl
14183 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 1-byte write to the port number held in DX; no guest registers modified. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
14191
14192
14193/**
14194 * @opcode 0xef
14195 * @opfltest iopl
14196 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 bytes for 16-bit,
       otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
14205
14206
14207/**
14208 * @opcode 0xf0
14209 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Record the LOCK prefix and re-dispatch on the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14218
14219
14220/**
14221 * @opcode 0xf1
14222 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo. Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    /* Dispatched as a software interrupt raising vector 1 (#DB). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
14235
14236
14237/**
14238 * @opcode 0xf2
14239 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Record the prefix and re-dispatch on the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14254
14255
14256/**
14257 * @opcode 0xf3
14258 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Record the prefix and re-dispatch on the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14273
14274
14275/**
14276 * @opcode 0xf4
14277 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred entirely to the C implementation; ends the translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
14284
14285
14286/**
14287 * @opcode 0xf5
14288 * @opflmodify cf
14289 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Complement the carry flag; no other flags are touched. */
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14299
14300
14301/**
14302 * Body for of 'inc/dec/not/neg Eb'.
14303 */
14304#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
14305 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
14306 { \
14307 /* register access */ \
14308 IEM_MC_BEGIN(0, 0); \
14309 IEMOP_HLP_DONE_DECODING(); \
14310 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14311 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14312 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
14313 IEM_MC_REF_EFLAGS(pEFlags); \
14314 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14315 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14316 IEM_MC_END(); \
14317 } \
14318 else \
14319 { \
14320 /* memory access. */ \
14321 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14322 { \
14323 IEM_MC_BEGIN(0, 0); \
14324 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14326 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14327 \
14328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14329 IEMOP_HLP_DONE_DECODING(); \
14330 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14332 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14333 \
14334 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14335 IEM_MC_COMMIT_EFLAGS(EFlags); \
14336 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14337 IEM_MC_END(); \
14338 } \
14339 else \
14340 { \
14341 IEM_MC_BEGIN(0, 0); \
14342 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14344 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14345 \
14346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14347 IEMOP_HLP_DONE_DECODING(); \
14348 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14349 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14350 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
14351 \
14352 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14353 IEM_MC_COMMIT_EFLAGS(EFlags); \
14354 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14355 IEM_MC_END(); \
14356 } \
14357 } \
14358 (void)0
14359
14360
14361/**
14362 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
14363 */
14364#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
14365 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14366 { \
14367 /* \
14368 * Register target \
14369 */ \
14370 switch (pVCpu->iem.s.enmEffOpSize) \
14371 { \
14372 case IEMMODE_16BIT: \
14373 IEM_MC_BEGIN(0, 0); \
14374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14375 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14376 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14377 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14378 IEM_MC_REF_EFLAGS(pEFlags); \
14379 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14380 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14381 IEM_MC_END(); \
14382 break; \
14383 \
14384 case IEMMODE_32BIT: \
14385 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14387 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14388 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14389 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14390 IEM_MC_REF_EFLAGS(pEFlags); \
14391 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14392 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
14393 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14394 IEM_MC_END(); \
14395 break; \
14396 \
14397 case IEMMODE_64BIT: \
14398 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14400 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14401 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14402 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14403 IEM_MC_REF_EFLAGS(pEFlags); \
14404 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14405 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14406 IEM_MC_END(); \
14407 break; \
14408 \
14409 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14410 } \
14411 } \
14412 else \
14413 { \
14414 /* \
14415 * Memory target. \
14416 */ \
14417 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14418 { \
14419 switch (pVCpu->iem.s.enmEffOpSize) \
14420 { \
14421 case IEMMODE_16BIT: \
14422 IEM_MC_BEGIN(0, 0); \
14423 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14425 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14426 \
14427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14428 IEMOP_HLP_DONE_DECODING(); \
14429 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14430 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14431 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14432 \
14433 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14434 IEM_MC_COMMIT_EFLAGS(EFlags); \
14435 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14436 IEM_MC_END(); \
14437 break; \
14438 \
14439 case IEMMODE_32BIT: \
14440 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14441 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14443 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14444 \
14445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14446 IEMOP_HLP_DONE_DECODING(); \
14447 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14449 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14450 \
14451 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14452 IEM_MC_COMMIT_EFLAGS(EFlags); \
14453 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14454 IEM_MC_END(); \
14455 break; \
14456 \
14457 case IEMMODE_64BIT: \
14458 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14459 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14461 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14462 \
14463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14464 IEMOP_HLP_DONE_DECODING(); \
14465 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14466 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14467 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14468 \
14469 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14470 IEM_MC_COMMIT_EFLAGS(EFlags); \
14471 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14472 IEM_MC_END(); \
14473 break; \
14474 \
14475 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14476 } \
14477 } \
14478 else \
14479 { \
14480 (void)0
14481
/**
 * Locked memory variant for 'inc/dec/not/neg Ev'; completes the code opened
 * by IEMOP_BODY_UNARY_Ev (fills in its open 'else' and closes the braces).
 *
 * @param   a_fnLockedU16   16-bit atomic worker.
 * @param   a_fnLockedU32   32-bit atomic worker.
 * @param   a_fnLockedU64   64-bit atomic worker.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14544
14545
14546/**
14547 * @opmaps grp3_f6
14548 * @opcode /0
14549 * @opflclass logical
14550 * @todo also /1
14551 */
14552FNIEMOP_DEF_1(iemOp_grp3_test_Eb_Ib, uint8_t, bRm)
14553{
14554 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14555 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14556 IEMOP_BODY_BINARY_Eb_Ib_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14557}
14558
14559
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The worker operates on AX in place and returns 0 on success; a non-zero
   return raises \#DE (divide error). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14607
14608
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   The worker operates on the xAX:xDX register pair in place and returns 0 on
   success; a non-zero return raises \#DE (divide error). */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
                IEM_MC_ARG(uint16_t,        u16Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
                IEM_MC_ARG(uint32_t,        u32Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    /* 32-bit writes through register references don't clear the \
                       high halves automatically here, so do it explicitly. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
                IEM_MC_ARG(uint64_t,        u64Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
                IEM_MC_ARG(uint16_t,        u16Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
                IEM_MC_ARG(uint32_t,        u32Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
                IEM_MC_ARG(uint64_t,        u64Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14770
14771
14772/**
14773 * @opmaps grp3_f6
14774 * @opcode /2
14775 * @opflclass unchanged
14776 */
14777FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14778{
14779/** @todo does not modify EFLAGS. */
14780 IEMOP_MNEMONIC(not_Eb, "not Eb");
14781 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14782}
14783
14784
14785/**
14786 * @opmaps grp3_f6
14787 * @opcode /3
14788 * @opflclass arithmetic
14789 */
14790FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14791{
14792 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14793 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14794}
14795
14796
14797/**
14798 * @opcode 0xf6
14799 */
14800FNIEMOP_DEF(iemOp_Grp3_Eb)
14801{
14802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14803 switch (IEM_GET_MODRM_REG_8(bRm))
14804 {
14805 case 0:
14806 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14807 case 1:
14808 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14809 case 2:
14810 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14811 case 3:
14812 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14813 case 4:
14814 {
14815 /**
14816 * @opdone
14817 * @opmaps grp3_f6
14818 * @opcode /4
14819 * @opflclass multiply
14820 */
14821 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14822 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14823 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14824 break;
14825 }
14826 case 5:
14827 {
14828 /**
14829 * @opdone
14830 * @opmaps grp3_f6
14831 * @opcode /5
14832 * @opflclass multiply
14833 */
14834 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14835 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14836 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14837 break;
14838 }
14839 case 6:
14840 {
14841 /**
14842 * @opdone
14843 * @opmaps grp3_f6
14844 * @opcode /6
14845 * @opflclass division
14846 */
14847 IEMOP_MNEMONIC(div_Eb, "div Eb");
14848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14849 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14850 break;
14851 }
14852 case 7:
14853 {
14854 /**
14855 * @opdone
14856 * @opmaps grp3_f6
14857 * @opcode /7
14858 * @opflclass division
14859 */
14860 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14862 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14863 break;
14864 }
14865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14866 }
14867}
14868
14869
14870/**
14871 * @opmaps grp3_f7
14872 * @opcode /0
14873 * @opflclass logical
14874 */
14875FNIEMOP_DEF_1(iemOp_grp3_test_Ev_Iz, uint8_t, bRm)
14876{
14877 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14878 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14879 IEMOP_BODY_BINARY_Ev_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14880}
14881
14882
14883/**
14884 * @opmaps grp3_f7
14885 * @opcode /2
14886 * @opflclass unchanged
14887 */
14888FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14889{
14890/** @todo does not modify EFLAGS */
14891 IEMOP_MNEMONIC(not_Ev, "not Ev");
14892 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14893 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14894}
14895
14896
14897/**
14898 * @opmaps grp3_f7
14899 * @opcode /3
14900 * @opflclass arithmetic
14901 */
14902FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14903{
14904 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14905 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14906 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14907}
14908
14909
14910/**
14911 * @opmaps grp3_f7
14912 * @opcode /4
14913 * @opflclass multiply
14914 */
14915FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14916{
14917 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14918 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14919 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14920}
14921
14922
14923/**
14924 * @opmaps grp3_f7
14925 * @opcode /5
14926 * @opflclass multiply
14927 */
14928FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14929{
14930 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14932 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14933}
14934
14935
14936/**
14937 * @opmaps grp3_f7
14938 * @opcode /6
14939 * @opflclass division
14940 */
14941FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14942{
14943 IEMOP_MNEMONIC(div_Ev, "div Ev");
14944 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14945 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14946}
14947
14948
14949/**
14950 * @opmaps grp3_f7
14951 * @opcode /7
14952 * @opflclass division
14953 */
14954FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14955{
14956 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14957 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14958 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14959}
14960
14961
14962/**
14963 * @opcode 0xf7
14964 */
14965FNIEMOP_DEF(iemOp_Grp3_Ev)
14966{
14967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14968 switch (IEM_GET_MODRM_REG_8(bRm))
14969 {
14970 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14971 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14972 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14973 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14974 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14975 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14976 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14977 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14979 }
14980}
14981
14982
14983/**
14984 * @opcode 0xf8
14985 * @opflmodify cf
14986 * @opflclear cf
14987 */
14988FNIEMOP_DEF(iemOp_clc)
14989{
14990 IEMOP_MNEMONIC(clc, "clc");
14991 IEM_MC_BEGIN(0, 0);
14992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14993 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14994 IEM_MC_ADVANCE_RIP_AND_FINISH();
14995 IEM_MC_END();
14996}
14997
14998
14999/**
15000 * @opcode 0xf9
15001 * @opflmodify cf
15002 * @opflset cf
15003 */
15004FNIEMOP_DEF(iemOp_stc)
15005{
15006 IEMOP_MNEMONIC(stc, "stc");
15007 IEM_MC_BEGIN(0, 0);
15008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15009 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
15010 IEM_MC_ADVANCE_RIP_AND_FINISH();
15011 IEM_MC_END();
15012}
15013
15014
15015/**
15016 * @opcode 0xfa
15017 * @opfltest iopl,vm
15018 * @opflmodify if,vif
15019 */
15020FNIEMOP_DEF(iemOp_cli)
15021{
15022 IEMOP_MNEMONIC(cli, "cli");
15023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15024 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
15025}
15026
15027
15028/**
15029 * @opcode 0xfb
15030 * @opfltest iopl,vm
15031 * @opflmodify if,vif
15032 */
15033FNIEMOP_DEF(iemOp_sti)
15034{
15035 IEMOP_MNEMONIC(sti, "sti");
15036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15037 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
15038 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
15039}
15040
15041
15042/**
15043 * @opcode 0xfc
15044 * @opflmodify df
15045 * @opflclear df
15046 */
15047FNIEMOP_DEF(iemOp_cld)
15048{
15049 IEMOP_MNEMONIC(cld, "cld");
15050 IEM_MC_BEGIN(0, 0);
15051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15052 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
15053 IEM_MC_ADVANCE_RIP_AND_FINISH();
15054 IEM_MC_END();
15055}
15056
15057
15058/**
15059 * @opcode 0xfd
15060 * @opflmodify df
15061 * @opflset df
15062 */
15063FNIEMOP_DEF(iemOp_std)
15064{
15065 IEMOP_MNEMONIC(std, "std");
15066 IEM_MC_BEGIN(0, 0);
15067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15068 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
15069 IEM_MC_ADVANCE_RIP_AND_FINISH();
15070 IEM_MC_END();
15071}
15072
15073
15074/**
15075 * @opmaps grp4
15076 * @opcode /0
15077 * @opflclass incdec
15078 */
15079FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
15080{
15081 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
15082 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
15083}
15084
15085
15086/**
15087 * @opmaps grp4
15088 * @opcode /1
15089 * @opflclass incdec
15090 */
15091FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
15092{
15093 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
15094 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
15095}
15096
15097
15098/**
15099 * @opcode 0xfe
15100 */
15101FNIEMOP_DEF(iemOp_Grp4)
15102{
15103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15104 switch (IEM_GET_MODRM_REG_8(bRm))
15105 {
15106 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
15107 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
15108 default:
15109 /** @todo is the eff-addr decoded? */
15110 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
15111 IEMOP_RAISE_INVALID_OPCODE_RET();
15112 }
15113}
15114
15115/**
15116 * @opmaps grp5
15117 * @opcode /0
15118 * @opflclass incdec
15119 */
15120FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
15121{
15122 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
15123 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
15124 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
15125}
15126
15127
15128/**
15129 * @opmaps grp5
15130 * @opcode /1
15131 * @opflclass incdec
15132 */
15133FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
15134{
15135 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
15136 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
15137 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
15138}
15139
15140
15141/**
15142 * Opcode 0xff /2.
15143 * @param bRm The RM byte.
15144 */
15145FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
15146{
15147 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
15148 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15149
15150 if (IEM_IS_MODRM_REG_MODE(bRm))
15151 {
15152 /* The new RIP is taken from a register. */
15153 switch (pVCpu->iem.s.enmEffOpSize)
15154 {
15155 case IEMMODE_16BIT:
15156 IEM_MC_BEGIN(0, 0);
15157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15158 IEM_MC_ARG(uint16_t, u16Target, 0);
15159 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15160 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15161 IEM_MC_END();
15162 break;
15163
15164 case IEMMODE_32BIT:
15165 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15167 IEM_MC_ARG(uint32_t, u32Target, 0);
15168 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15169 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15170 IEM_MC_END();
15171 break;
15172
15173 case IEMMODE_64BIT:
15174 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15176 IEM_MC_ARG(uint64_t, u64Target, 0);
15177 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15178 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15179 IEM_MC_END();
15180 break;
15181
15182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15183 }
15184 }
15185 else
15186 {
15187 /* The new RIP is taken from a register. */
15188 switch (pVCpu->iem.s.enmEffOpSize)
15189 {
15190 case IEMMODE_16BIT:
15191 IEM_MC_BEGIN(0, 0);
15192 IEM_MC_ARG(uint16_t, u16Target, 0);
15193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15196 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15197 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15198 IEM_MC_END();
15199 break;
15200
15201 case IEMMODE_32BIT:
15202 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15203 IEM_MC_ARG(uint32_t, u32Target, 0);
15204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15207 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15208 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15209 IEM_MC_END();
15210 break;
15211
15212 case IEMMODE_64BIT:
15213 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15214 IEM_MC_ARG(uint64_t, u64Target, 0);
15215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15218 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15219 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15220 IEM_MC_END();
15221 break;
15222
15223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15224 }
15225 }
15226}
15227
/**
 * Common body for the far indirect branches, 0xff /3 (callf Ep) and
 * 0xff /5 (jmpf Ep).
 *
 * Loads a far pointer (offset followed by a 16-bit selector) from memory and
 * defers the actual far branch to a C implementation.  Register operands are
 * invalid and raise \#UD.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation performing the branch.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags for the CIMPL
 *                          call (e.g. IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
15296
15297
15298/**
15299 * Opcode 0xff /3.
15300 * @param bRm The RM byte.
15301 */
15302FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
15303{
15304 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
15305 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
15306}
15307
15308
15309/**
15310 * Opcode 0xff /4.
15311 * @param bRm The RM byte.
15312 */
15313FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15314{
15315 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
15316 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15317
15318 if (IEM_IS_MODRM_REG_MODE(bRm))
15319 {
15320 /* The new RIP is taken from a register. */
15321 switch (pVCpu->iem.s.enmEffOpSize)
15322 {
15323 case IEMMODE_16BIT:
15324 IEM_MC_BEGIN(0, 0);
15325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15326 IEM_MC_LOCAL(uint16_t, u16Target);
15327 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15328 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15329 IEM_MC_END();
15330 break;
15331
15332 case IEMMODE_32BIT:
15333 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15335 IEM_MC_LOCAL(uint32_t, u32Target);
15336 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15337 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15338 IEM_MC_END();
15339 break;
15340
15341 case IEMMODE_64BIT:
15342 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15344 IEM_MC_LOCAL(uint64_t, u64Target);
15345 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15346 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15347 IEM_MC_END();
15348 break;
15349
15350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15351 }
15352 }
15353 else
15354 {
15355 /* The new RIP is taken from a memory location. */
15356 switch (pVCpu->iem.s.enmEffOpSize)
15357 {
15358 case IEMMODE_16BIT:
15359 IEM_MC_BEGIN(0, 0);
15360 IEM_MC_LOCAL(uint16_t, u16Target);
15361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15364 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15365 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15366 IEM_MC_END();
15367 break;
15368
15369 case IEMMODE_32BIT:
15370 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15371 IEM_MC_LOCAL(uint32_t, u32Target);
15372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15375 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15376 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15377 IEM_MC_END();
15378 break;
15379
15380 case IEMMODE_64BIT:
15381 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15382 IEM_MC_LOCAL(uint64_t, u64Target);
15383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15386 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15387 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15388 IEM_MC_END();
15389 break;
15390
15391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15392 }
15393 }
15394}
15395
15396
15397/**
15398 * Opcode 0xff /5.
15399 * @param bRm The RM byte.
15400 */
15401FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15402{
15403 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
15404 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
15405}
15406
15407
15408/**
15409 * Opcode 0xff /6.
15410 * @param bRm The RM byte.
15411 */
15412FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15413{
15414 IEMOP_MNEMONIC(push_Ev, "push Ev");
15415
15416 /* Registers are handled by a common worker. */
15417 if (IEM_IS_MODRM_REG_MODE(bRm))
15418 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
15419
15420 /* Memory we do here. */
15421 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15422 switch (pVCpu->iem.s.enmEffOpSize)
15423 {
15424 case IEMMODE_16BIT:
15425 IEM_MC_BEGIN(0, 0);
15426 IEM_MC_LOCAL(uint16_t, u16Src);
15427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15430 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15431 IEM_MC_PUSH_U16(u16Src);
15432 IEM_MC_ADVANCE_RIP_AND_FINISH();
15433 IEM_MC_END();
15434 break;
15435
15436 case IEMMODE_32BIT:
15437 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
15438 IEM_MC_LOCAL(uint32_t, u32Src);
15439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15442 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15443 IEM_MC_PUSH_U32(u32Src);
15444 IEM_MC_ADVANCE_RIP_AND_FINISH();
15445 IEM_MC_END();
15446 break;
15447
15448 case IEMMODE_64BIT:
15449 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15450 IEM_MC_LOCAL(uint64_t, u64Src);
15451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15454 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15455 IEM_MC_PUSH_U64(u64Src);
15456 IEM_MC_ADVANCE_RIP_AND_FINISH();
15457 IEM_MC_END();
15458 break;
15459
15460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15461 }
15462}
15463
15464
15465/**
15466 * @opcode 0xff
15467 */
15468FNIEMOP_DEF(iemOp_Grp5)
15469{
15470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15471 switch (IEM_GET_MODRM_REG_8(bRm))
15472 {
15473 case 0:
15474 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15475 case 1:
15476 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15477 case 2:
15478 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15479 case 3:
15480 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15481 case 4:
15482 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15483 case 5:
15484 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15485 case 6:
15486 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15487 case 7:
15488 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15489 IEMOP_RAISE_INVALID_OPCODE_RET();
15490 }
15491 AssertFailedReturn(VERR_IEM_IPE_3);
15492}
15493
15494
15495
/**
 * The one-byte opcode decoder dispatch table, indexed by the first opcode
 * byte fetched from the instruction stream.  Forward declared at the top of
 * this file so the decoder loop can reference it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15563
15564
15565/** @} */
15566
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette