VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@104238

Last change on this file since 104238 was 104238, checked in by vboxsync, 8 weeks ago

VMM/IEM: Refactoring assembly helpers to not pass eflags by reference but instead by value and return the updated value (via eax/w0) - sixth chunk: SHL,SHR,SAR,ROL,ROR,RCL,RCR. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 595.4 KB
/* $Id: IEMAllInstOneByte.cpp.h 104238 2024-04-08 20:15:10Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
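
/*
 * Dispatch sketch (illustration only, mirroring the pattern the prefix
 * handlers further down in this file actually use): the decoder fetches one
 * opcode byte and jumps through this table:
 *
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *     return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */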

/* Instruction group definitions: */

/** @defgroup og_gen    General
 * @{ */
 /** @defgroup og_gen_arith     Arithmetic
  * @{ */
  /** @defgroup og_gen_arith_bin      Binary numbers */
  /** @defgroup og_gen_arith_dec      Decimal numbers */
  /** @} */
/** @} */

/** @defgroup og_stack Stack
 * @{ */
 /** @defgroup og_stack_sreg Segment registers */
/** @} */

/** @defgroup og_prefix     Prefixes */
/** @defgroup og_escapes    Escape bytes */



/** @name One byte opcodes.
 * @{
 */

/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register.
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on
 * the same register.
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        IEM_MC_LOCAL_EFLAGS(fEFlags); \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
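
/*
 * Usage sketch (illustrative only, not part of this file's API surface): a
 * decoder body for XOR r/m8,r8 could try this zeroing fast path first and
 * fall through to the generic read-modify-write body when reg != rm:
 *
 *     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *     IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm);
 *     IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 */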

/**
 * Body for instructions like ADD, AND, OR, ++ with a byte memory/register
 * as the destination.  (The read-only TEST and CMP variant follows below.)
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS_OPT(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
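
/*
 * Assembly-helper calling convention assumed by the bodies above, sketched
 * for illustration (per the r104238 refactoring note: EFLAGS now travel by
 * value and the updated value is returned in eax/w0 instead of being passed
 * back through a pointer):
 *
 *     uint32_t iemAImpl_add_u8(uint32_t fEFlags, uint8_t *pu8Dst, uint8_t u8Src);
 */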

/**
 * Body for instructions like TEST & CMP with byte memory/register
 * operands.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 2); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
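
/*
 * Token-pasting note: RT_CONCAT3 forms the helper names above at compile
 * time.  For example, with a_InsNm = and, RT_CONCAT3(iemNativeEmit_,and,_r_r_efl)
 * yields iemNativeEmit_and_r_r_efl, and RT_CONCAT3(iemAImpl_,and,_u8) yields
 * iemAImpl_and_u8.
 */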

/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0

/**
 * Body for the byte instruction CMP with a register as the destination.
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0


/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
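
/*
 * Pairing sketch: the _RW and _LOCKED bodies are emitted back to back in a
 * decoder function; the first handles the plain and lock-disregarding paths
 * and leaves its else branch open, the second completes it with the atomic
 * helpers, e.g. (as iemOp_add_Ev_Gv does below):
 *
 *     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *     IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 *     IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
 */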

/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 2); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 2); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 2); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0


/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
    IEM_MC_BEGIN(0, 0); \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
        IEM_MC_LOCAL(uint8_t, u8Dst); \
        IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
        IEM_MC_LOCAL(uint32_t, uEFlags); \
        IEM_MC_FETCH_EFLAGS(uEFlags); \
        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
    } IEM_MC_NATIVE_ELSE() { \
        IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
        IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 2); \
        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
    } IEM_MC_NATIVE_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
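
/*
 * Decode example (an x86 encoding fact, not specific to this file): the
 * byte sequence 04 05 is ADD AL, 5; opcode 0x04 below routes it to
 * iemOp_add_Al_Ib, which expands this body with a_InsNm = add.
 */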

/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
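
/*
 * Note on the 64-bit case: IEM_OPCODE_GET_NEXT_S32_SX_U64 reads a 32-bit
 * immediate and sign-extends it to 64 bits, matching the x86-64 Iz rule.
 * For example, an immediate of 0x80000000 is applied to RAX as
 * 0xFFFFFFFF80000000.
 */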

/**
 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
 * word/dword immediate.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG_EFLAGS(fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0



/* Instruction specification format - work in progress: */

/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflclass arithmetic
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}


/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
}


/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}


/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}


/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clear it.
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
}


/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
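
/*
 * Two-byte map indexing, worked example: the table holds four entries per
 * opcode byte, selected by idxPrefix (none/66h/F3h/F2h, per the sanity
 * asserts above).  So 0x0f 0xbc decodes as bsf Gv,Ev for slot 0xbc*4+0 and
 * as tzcnt Gv,Ev for the F3-prefixed slot 0xbc*4+2.
 */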
1260
1261/**
1262 * @opcode 0x10
1263 * @opgroup og_gen_arith_bin
1264 * @opflclass arithmetic_carry
1265 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1266 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1267 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1268 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1269 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1270 */
1271FNIEMOP_DEF(iemOp_adc_Eb_Gb)
1272{
1273 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1275 IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1276}
1277
1278
1279/**
1280 * @opcode 0x11
1281 * @opgroup og_gen_arith_bin
1282 * @opflclass arithmetic_carry
1283 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1284 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1285 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1286 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1287 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1288 */
1289FNIEMOP_DEF(iemOp_adc_Ev_Gv)
1290{
1291 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1293 IEMOP_BODY_BINARY_rm_rv_RW( bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1294 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
1295}
1296
1297
1298/**
1299 * @opcode 0x12
1300 * @opgroup og_gen_arith_bin
1301 * @opflclass arithmetic_carry
1302 * @opcopytests iemOp_adc_Eb_Gb
1303 */
1304FNIEMOP_DEF(iemOp_adc_Gb_Eb)
1305{
1306 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1308 IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1309}
1310
1311
1312/**
1313 * @opcode 0x13
1314 * @opgroup og_gen_arith_bin
1315 * @opflclass arithmetic_carry
1316 * @opcopytests iemOp_adc_Ev_Gv
1317 */
1318FNIEMOP_DEF(iemOp_adc_Gv_Ev)
1319{
1320 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1322 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1323}
1324
1325
1326/**
1327 * @opcode 0x14
1328 * @opgroup og_gen_arith_bin
1329 * @opflclass arithmetic_carry
1330 * @opcopytests iemOp_adc_Eb_Gb
1331 */
1332FNIEMOP_DEF(iemOp_adc_Al_Ib)
1333{
1334 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1335 IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1336}
1337
1338
1339/**
1340 * @opcode 0x15
1341 * @opgroup og_gen_arith_bin
1342 * @opflclass arithmetic_carry
1343 * @opcopytests iemOp_adc_Ev_Gv
1344 */
1345FNIEMOP_DEF(iemOp_adc_eAX_Iz)
1346{
1347 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1348 IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1349}
1350
1351
1352/**
1353 * @opcode 0x16
1354 */
1355FNIEMOP_DEF(iemOp_push_SS)
1356{
1357 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1358 IEMOP_HLP_NO_64BIT();
1359 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
1360}
1361
1362
1363/**
1364 * @opcode 0x17
1365 */
1366FNIEMOP_DEF(iemOp_pop_SS)
1367{
1368 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
1369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1370 IEMOP_HLP_NO_64BIT();
1371 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
1372 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1373 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
1374 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
1375 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
1376 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
1377 iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
1378}
1379
1380
1381/**
1382 * @opcode 0x18
1383 * @opgroup og_gen_arith_bin
1384 * @opflclass arithmetic_carry
1385 */
1386FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
1387{
1388 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1390 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1391}
1392
1393
1394/**
1395 * @opcode 0x19
1396 * @opgroup og_gen_arith_bin
1397 * @opflclass arithmetic_carry
1398 */
1399FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
1400{
1401 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1403 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1404 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
1405}
1406
1407
1408/**
1409 * @opcode 0x1a
1410 * @opgroup og_gen_arith_bin
1411 * @opflclass arithmetic_carry
1412 */
1413FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
1414{
1415 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1417 IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1418}
1419
1420
1421/**
1422 * @opcode 0x1b
1423 * @opgroup og_gen_arith_bin
1424 * @opflclass arithmetic_carry
1425 */
1426FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
1427{
1428 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1430 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1431}
1432
1433
1434/**
1435 * @opcode 0x1c
1436 * @opgroup og_gen_arith_bin
1437 * @opflclass arithmetic_carry
1438 */
1439FNIEMOP_DEF(iemOp_sbb_Al_Ib)
1440{
1441 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1442 IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1443}
1444
1445
1446/**
1447 * @opcode 0x1d
1448 * @opgroup og_gen_arith_bin
1449 * @opflclass arithmetic_carry
1450 */
1451FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
1452{
1453 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1454 IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1455}
1456
1457
1458/**
1459 * @opcode 0x1e
1460 * @opgroup og_stack_sreg
1461 */
1462FNIEMOP_DEF(iemOp_push_DS)
1463{
1464 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1465 IEMOP_HLP_NO_64BIT();
1466 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
1467}
1468
1469
1470/**
1471 * @opcode 0x1f
1472 * @opgroup og_stack_sreg
1473 */
1474FNIEMOP_DEF(iemOp_pop_DS)
1475{
1476 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1478 IEMOP_HLP_NO_64BIT();
1479 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
1480 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1481 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
1482 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
1483 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
1484 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
1485 iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
1486}
1487
1488
1489/**
1490 * @opcode 0x20
1491 * @opgroup og_gen_arith_bin
1492 * @opflclass logical
1493 */
1494FNIEMOP_DEF(iemOp_and_Eb_Gb)
1495{
1496 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1497 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1499 IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1500}
1501
1502
1503/**
1504 * @opcode 0x21
1505 * @opgroup og_gen_arith_bin
1506 * @opflclass logical
1507 */
1508FNIEMOP_DEF(iemOp_and_Ev_Gv)
1509{
1510 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1511 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1513 IEMOP_BODY_BINARY_rm_rv_RW( bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1514 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
1515}
1516
1517
1518/**
1519 * @opcode 0x22
1520 * @opgroup og_gen_arith_bin
1521 * @opflclass logical
1522 */
1523FNIEMOP_DEF(iemOp_and_Gb_Eb)
1524{
1525 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1526 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1528 IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1529}
1530
1531
1532/**
1533 * @opcode 0x23
1534 * @opgroup og_gen_arith_bin
1535 * @opflclass logical
1536 */
1537FNIEMOP_DEF(iemOp_and_Gv_Ev)
1538{
1539 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1540 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1542 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1543}
1544
1545
1546/**
1547 * @opcode 0x24
1548 * @opgroup og_gen_arith_bin
1549 * @opflclass logical
1550 */
1551FNIEMOP_DEF(iemOp_and_Al_Ib)
1552{
1553 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1555 IEMOP_BODY_BINARY_AL_Ib(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1556}
1557
1558
1559/**
1560 * @opcode 0x25
1561 * @opgroup og_gen_arith_bin
1562 * @opflclass logical
1563 */
1564FNIEMOP_DEF(iemOp_and_eAX_Iz)
1565{
1566 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1567 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1568 IEMOP_BODY_BINARY_rAX_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1569}
1570
1571
1572/**
1573 * @opcode 0x26
1574 * @opmnemonic SEG
1575 * @op1 ES
1576 * @opgroup og_prefix
1577 * @openc prefix
1578 * @opdisenum OP_SEG
1579 * @ophints harmless
1580 */
1581FNIEMOP_DEF(iemOp_seg_ES)
1582{
1583 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1584 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1585 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1586
1587 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1588 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1589}
1590
1591
1592/**
1593 * @opcode 0x27
1594 * @opfltest af,cf
1595 * @opflmodify cf,pf,af,zf,sf,of
1596 * @opflundef of
1597 */
1598FNIEMOP_DEF(iemOp_daa)
1599{
1600 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1601 IEMOP_HLP_NO_64BIT();
1602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1603 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1604 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
1605}
1606
1607
1608/**
1609 * Special case body for word/dword/qword instruction like SUB and XOR that can
1610 * be used to zero a register.
1611 *
1612 * This can be used both for the rv_rm and rm_rv forms since it's working on the
1613 * same register.
1614 */
1615#define IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(a_bRm) \
1616 if ( (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
1617 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
1618 { \
1619 switch (pVCpu->iem.s.enmEffOpSize) \
1620 { \
1621 case IEMMODE_16BIT: \
1622 IEM_MC_BEGIN(0, 0); \
1623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1624 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1625 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1626 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1627 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1628 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1629 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1630 IEM_MC_END(); \
1631 break; \
1632 \
1633 case IEMMODE_32BIT: \
1634 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
1635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1636 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1637 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1638 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1639 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1640 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1641 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1642 IEM_MC_END(); \
1643 break; \
1644 \
1645 case IEMMODE_64BIT: \
1646 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1648 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1649 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1650 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1651 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1652 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1653 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1654 IEM_MC_END(); \
1655 break; \
1656 \
1657 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1658 } \
1659 } ((void)0)
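
/* Example: 'xor eax,eax' encodes as 31 C0, i.e. bRm=0xC0 with mod=3, reg=0 and
   rm=0, so the reg == rm test above fires and the register is simply zeroed,
   with EFLAGS forced to ZF|PF and the remaining status flags (CF, AF, SF, OF)
   cleared - no arithmetic helper call needed. */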
1660
1661
1662/**
1663 * @opcode 0x28
1664 * @opgroup og_gen_arith_bin
1665 * @opflclass arithmetic
1666 */
1667FNIEMOP_DEF(iemOp_sub_Eb_Gb)
1668{
1669 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1671 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1672 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1673}
1674
1675
1676/**
1677 * @opcode 0x29
1678 * @opgroup og_gen_arith_bin
1679 * @opflclass arithmetic
1680 */
1681FNIEMOP_DEF(iemOp_sub_Ev_Gv)
1682{
1683 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1685 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1686 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1687 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
1688}
1689
1690
1691/**
1692 * @opcode 0x2a
1693 * @opgroup og_gen_arith_bin
1694 * @opflclass arithmetic
1695 */
1696FNIEMOP_DEF(iemOp_sub_Gb_Eb)
1697{
1698 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1700 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1701 IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1702}
1703
1704
1705/**
1706 * @opcode 0x2b
1707 * @opgroup og_gen_arith_bin
1708 * @opflclass arithmetic
1709 */
1710FNIEMOP_DEF(iemOp_sub_Gv_Ev)
1711{
1712 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1714 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1715 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1716}
1717
1718
1719/**
1720 * @opcode 0x2c
1721 * @opgroup og_gen_arith_bin
1722 * @opflclass arithmetic
1723 */
1724FNIEMOP_DEF(iemOp_sub_Al_Ib)
1725{
1726 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1727 IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1728}
1729
1730
1731/**
1732 * @opcode 0x2d
1733 * @opgroup og_gen_arith_bin
1734 * @opflclass arithmetic
1735 */
1736FNIEMOP_DEF(iemOp_sub_eAX_Iz)
1737{
1738 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1739 IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1740}
1741
1742
1743/**
1744 * @opcode 0x2e
1745 * @opmnemonic SEG
1746 * @op1 CS
1747 * @opgroup og_prefix
1748 * @openc prefix
1749 * @opdisenum OP_SEG
1750 * @ophints harmless
1751 */
1752FNIEMOP_DEF(iemOp_seg_CS)
1753{
1754 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1755 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1756 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1757
1758 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1759 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1760}
1761
1762
1763/**
1764 * @opcode 0x2f
1765 * @opfltest af,cf
1766 * @opflmodify cf,pf,af,zf,sf,of
1767 * @opflundef of
1768 */
1769FNIEMOP_DEF(iemOp_das)
1770{
1771 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1772 IEMOP_HLP_NO_64BIT();
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1775 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
1776}
1777
1778
1779/**
1780 * @opcode 0x30
1781 * @opgroup og_gen_arith_bin
1782 * @opflclass logical
1783 */
1784FNIEMOP_DEF(iemOp_xor_Eb_Gb)
1785{
1786 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1790 IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1791}
1792
1793
1794/**
1795 * @opcode 0x31
1796 * @opgroup og_gen_arith_bin
1797 * @opflclass logical
1798 */
1799FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1800{
1801 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1802 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1804 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1805 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1806 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, xor);
1807}
1808
1809
1810/**
1811 * @opcode 0x32
1812 * @opgroup og_gen_arith_bin
1813 * @opflclass logical
1814 */
1815FNIEMOP_DEF(iemOp_xor_Gb_Eb)
1816{
1817 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1818 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1821 IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1822}
1823
1824
1825/**
1826 * @opcode 0x33
1827 * @opgroup og_gen_arith_bin
1828 * @opflclass logical
1829 */
1830FNIEMOP_DEF(iemOp_xor_Gv_Ev)
1831{
1832 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1833 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1835 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to a known value */
1836 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1837}
1838
1839
1840/**
1841 * @opcode 0x34
1842 * @opgroup og_gen_arith_bin
1843 * @opflclass logical
1844 */
1845FNIEMOP_DEF(iemOp_xor_Al_Ib)
1846{
1847 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1849 IEMOP_BODY_BINARY_AL_Ib(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1850}
1851
1852
1853/**
1854 * @opcode 0x35
1855 * @opgroup og_gen_arith_bin
1856 * @opflclass logical
1857 */
1858FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1859{
1860 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1862 IEMOP_BODY_BINARY_rAX_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1863}
1864
1865
1866/**
1867 * @opcode 0x36
1868 * @opmnemonic SEG
1869 * @op1 SS
1870 * @opgroup og_prefix
1871 * @openc prefix
1872 * @opdisenum OP_SEG
1873 * @ophints harmless
1874 */
1875FNIEMOP_DEF(iemOp_seg_SS)
1876{
1877 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1878 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1879 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1880
1881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1883}
1884
1885
1886/**
1887 * @opcode 0x37
1888 * @opfltest af
1889 * @opflmodify cf,pf,af,zf,sf,of
1890 * @opflundef pf,zf,sf,of
1891 * @opgroup og_gen_arith_dec
1892 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1893 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1894 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1895 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1896 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1897 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1898 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1899 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1900 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1901 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1902 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1903 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1904 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1905 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1906 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1907 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1908 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1909 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1910 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1911 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1912 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1913 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1914 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1915 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1916 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1917 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1918 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1919 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1920 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1921 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1922 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1923 */
1924FNIEMOP_DEF(iemOp_aaa)
1925{
1926 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1927 IEMOP_HLP_NO_64BIT();
1928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1929 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1930
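    /* In short: if (AL & 0xf) > 9 or AF is set, AX += 0x106 and AF=CF=1;
       otherwise AF=CF=0; finally AL &= 0xf.  The Intel vs AMD differences in
       the @optest rows above are all in how the undefined flags fall out. */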
1931 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
1932}
1933
1934
1935/**
1936 * Body for word/dword/qword instructions like CMP, ++ with a register as the
1937 * destination.
1938 *
1939 * @note Used both in OneByte and TwoByte0f.
1940 */
1941#define IEMOP_BODY_BINARY_rv_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
1942 /* \
1943 * If rm is denoting a register, no more instruction bytes. \
1944 */ \
1945 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
1946 { \
1947 switch (pVCpu->iem.s.enmEffOpSize) \
1948 { \
1949 case IEMMODE_16BIT: \
1950 IEM_MC_BEGIN(0, 0); \
1951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1952 IEM_MC_ARG(uint16_t, u16Src, 2); \
1953 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1954 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1955 IEM_MC_LOCAL(uint16_t, u16Dst); \
1956 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
1957 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1958 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
1959 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1960 } IEM_MC_NATIVE_ELSE() { \
1961 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
1962 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
1963 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1964 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
1965 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1966 } IEM_MC_NATIVE_ENDIF(); \
1967 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1968 IEM_MC_END(); \
1969 break; \
1970 \
1971 case IEMMODE_32BIT: \
1972 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1974 IEM_MC_ARG(uint32_t, u32Src, 2); \
1975 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1976 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1977 IEM_MC_LOCAL(uint32_t, u32Dst); \
1978 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
1979 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1980 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
1981 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1982 } IEM_MC_NATIVE_ELSE() { \
1983 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
1984 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
1985 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1986 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
1987 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1988 } IEM_MC_NATIVE_ENDIF(); \
1989 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1990 IEM_MC_END(); \
1991 break; \
1992 \
1993 case IEMMODE_64BIT: \
1994 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1996 IEM_MC_ARG(uint64_t, u64Src, 2); \
1997 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1998 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1999 IEM_MC_LOCAL(uint64_t, u64Dst); \
2000 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2001 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2002 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2003 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2004 } IEM_MC_NATIVE_ELSE() { \
2005 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2006 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2007 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2008 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2009 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2010 } IEM_MC_NATIVE_ENDIF(); \
2011 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2012 IEM_MC_END(); \
2013 break; \
2014 \
2015 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2016 } \
2017 } \
2018 else \
2019 { \
2020 /* \
2021 * We're accessing memory. \
2022 */ \
2023 switch (pVCpu->iem.s.enmEffOpSize) \
2024 { \
2025 case IEMMODE_16BIT: \
2026 IEM_MC_BEGIN(0, 0); \
2027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2030 IEM_MC_ARG(uint16_t, u16Src, 2); \
2031 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2032 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2033 IEM_MC_LOCAL(uint16_t, u16Dst); \
2034 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2035 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2036 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
2037 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2038 } IEM_MC_NATIVE_ELSE() { \
2039 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
2040 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2041 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2042 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
2043 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2044 } IEM_MC_NATIVE_ENDIF(); \
2045 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2046 IEM_MC_END(); \
2047 break; \
2048 \
2049 case IEMMODE_32BIT: \
2050 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
2051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2054 IEM_MC_ARG(uint32_t, u32Src, 2); \
2055 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2056 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2057 IEM_MC_LOCAL(uint32_t, u32Dst); \
2058 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2059 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2060 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
2061 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2062 } IEM_MC_NATIVE_ELSE() { \
2063 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
2064 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2065 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2066 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
2067 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2068 } IEM_MC_NATIVE_ENDIF(); \
2069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2070 IEM_MC_END(); \
2071 break; \
2072 \
2073 case IEMMODE_64BIT: \
2074 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2078 IEM_MC_ARG(uint64_t, u64Src, 2); \
2079 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2080 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2081 IEM_MC_LOCAL(uint64_t, u64Dst); \
2082 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2083 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2084 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2085 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2086 } IEM_MC_NATIVE_ELSE() { \
2087 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2088 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2089 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2090 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2091 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2092 } IEM_MC_NATIVE_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2094 IEM_MC_END(); \
2095 break; \
2096 \
2097 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2098 } \
2099 } \
2100 (void)0
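
/* Note that unlike the _RW bodies nothing is written back to the destination
   here - CMP only needs the flags.  The assembly helpers take the input
   EFLAGS by value (fEFlagsIn) and return the updated value (fEFlagsRet)
   rather than going through a pointer, so e.g. 'cmp ax,bx' just computes the
   status flags of AX - BX and commits them. */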
2101
2102
2103/**
2104 * @opcode 0x38
2105 * @opflclass arithmetic
2106 */
2107FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
2108{
2109 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
2110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2111 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2112}
2113
2114
2115/**
2116 * @opcode 0x39
2117 * @opflclass arithmetic
2118 */
2119FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
2120{
2121 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
2122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2123 IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2124}
2125
2126
2127/**
2128 * @opcode 0x3a
2129 * @opflclass arithmetic
2130 */
2131FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
2132{
2133 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
2134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2135 IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2136}
2137
2138
2139/**
2140 * @opcode 0x3b
2141 * @opflclass arithmetic
2142 */
2143FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
2144{
2145 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
2146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2147 IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2148}
2149
2150
2151/**
2152 * @opcode 0x3c
2153 * @opflclass arithmetic
2154 */
2155FNIEMOP_DEF(iemOp_cmp_Al_Ib)
2156{
2157 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
2158 IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2159}
2160
2161
2162/**
2163 * @opcode 0x3d
2164 * @opflclass arithmetic
2165 */
2166FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
2167{
2168 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
2169 IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2170}
2171
2172
2173/**
2174 * @opcode 0x3e
2175 */
2176FNIEMOP_DEF(iemOp_seg_DS)
2177{
2178 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
2179 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
2180 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
2181
2182 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2183 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2184}
2185
2186
2187/**
2188 * @opcode 0x3f
2189 * @opfltest af
2190 * @opflmodify cf,pf,af,zf,sf,of
2191 * @opflundef pf,zf,sf,of
2192 * @opgroup og_gen_arith_dec
2193 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
2194 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
2195 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
2196 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
2197 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
2198 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
2199 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
2200 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
2201 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
2202 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2203 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2204 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
2205 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
2206 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
2207 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
2208 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2209 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2210 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2211 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2212 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
2213 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2214 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2215 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2216 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2217 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2218 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2219 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2220 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2221 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2222 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2223 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2224 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2225 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2226 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2227 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2228 */
2229FNIEMOP_DEF(iemOp_aas)
2230{
2231 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2232 IEMOP_HLP_NO_64BIT();
2233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
2235
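    /* In short: if (AL & 0xf) > 9 or AF is set, AX -= 6, AH -= 1 and AF=CF=1;
       otherwise AF=CF=0; finally AL &= 0xf - see the 'ax=0 -> ax=0xfe0a'
       @optest rows above. */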
2236 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2237}
2238
2239
2240/**
2241 * Common 'inc/dec register' helper.
2242 *
2243 * Not for 64-bit code, only for what became the rex prefixes.
2244 */
2245#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
2246 switch (pVCpu->iem.s.enmEffOpSize) \
2247 { \
2248 case IEMMODE_16BIT: \
2249 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0); \
2250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2251 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
2252 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2253 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
2254 IEM_MC_REF_EFLAGS(pEFlags); \
2255 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
2256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2257 IEM_MC_END(); \
2258 break; \
2259 \
2260 case IEMMODE_32BIT: \
2261 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2263 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
2264 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2265 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
2266 IEM_MC_REF_EFLAGS(pEFlags); \
2267 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
2268 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
2269 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2270 IEM_MC_END(); \
2271 break; \
2272 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2273 } \
2274 (void)0
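
/* In 64-bit mode opcodes 0x40..0x4f are the REX prefixes instead, laid out as
   0100WRXB: bit 3 = REX.W (64-bit operand size), bit 2 = REX.R (ModRM.reg
   extension), bit 1 = REX.X (SIB.index extension) and bit 0 = REX.B (ModRM.rm
   / SIB.base / opcode register extension).  E.g. 0x41 is rex.b and 0x48 is
   rex.w, matching the IEM_OP_PRF_REX_* bits set by the handlers below. */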
2275
2276/**
2277 * @opcode 0x40
2278 * @opflclass incdec
2279 */
2280FNIEMOP_DEF(iemOp_inc_eAX)
2281{
2282 /*
2283 * This is a REX prefix in 64-bit mode.
2284 */
2285 if (IEM_IS_64BIT_CODE(pVCpu))
2286 {
2287 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
2288 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
2289
2290 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2291 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2292 }
2293
2294 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
2295 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
2296}
2297
2298
2299/**
2300 * @opcode 0x41
2301 * @opflclass incdec
2302 */
2303FNIEMOP_DEF(iemOp_inc_eCX)
2304{
2305 /*
2306 * This is a REX prefix in 64-bit mode.
2307 */
2308 if (IEM_IS_64BIT_CODE(pVCpu))
2309 {
2310 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
2311 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
2312 pVCpu->iem.s.uRexB = 1 << 3;
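        /* The extension bit is stored pre-shifted (1 << 3) so it can be ORed
           straight into a 3-bit register index, e.g. turning 'push rcx' (51h)
           into 'push r9' when prefixed by this byte (41 51). */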
2313
2314 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2315 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2316 }
2317
2318 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
2319 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
2320}
2321
2322
2323/**
2324 * @opcode 0x42
2325 * @opflclass incdec
2326 */
2327FNIEMOP_DEF(iemOp_inc_eDX)
2328{
2329 /*
2330 * This is a REX prefix in 64-bit mode.
2331 */
2332 if (IEM_IS_64BIT_CODE(pVCpu))
2333 {
2334 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
2335 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
2336 pVCpu->iem.s.uRexIndex = 1 << 3;
2337
2338 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2339 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2340 }
2341
2342 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
2343 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
2344}
2345
2346
2347
2348/**
2349 * @opcode 0x43
2350 * @opflclass incdec
2351 */
2352FNIEMOP_DEF(iemOp_inc_eBX)
2353{
2354 /*
2355 * This is a REX prefix in 64-bit mode.
2356 */
2357 if (IEM_IS_64BIT_CODE(pVCpu))
2358 {
2359 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
2360 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2361 pVCpu->iem.s.uRexB = 1 << 3;
2362 pVCpu->iem.s.uRexIndex = 1 << 3;
2363
2364 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2365 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2366 }
2367
2368 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
2369 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
2370}
2371
2372
2373/**
2374 * @opcode 0x44
2375 * @opflclass incdec
2376 */
2377FNIEMOP_DEF(iemOp_inc_eSP)
2378{
2379 /*
2380 * This is a REX prefix in 64-bit mode.
2381 */
2382 if (IEM_IS_64BIT_CODE(pVCpu))
2383 {
2384 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
2385 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
2386 pVCpu->iem.s.uRexReg = 1 << 3;
2387
2388 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2389 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2390 }
2391
2392 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
2393 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
2394}
2395
2396
2397/**
2398 * @opcode 0x45
2399 * @opflclass incdec
2400 */
2401FNIEMOP_DEF(iemOp_inc_eBP)
2402{
2403 /*
2404 * This is a REX prefix in 64-bit mode.
2405 */
2406 if (IEM_IS_64BIT_CODE(pVCpu))
2407 {
2408 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
2409 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
2410 pVCpu->iem.s.uRexReg = 1 << 3;
2411 pVCpu->iem.s.uRexB = 1 << 3;
2412
2413 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2414 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2415 }
2416
2417 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
2418 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
2419}
2420
2421
2422/**
2423 * @opcode 0x46
2424 * @opflclass incdec
2425 */
2426FNIEMOP_DEF(iemOp_inc_eSI)
2427{
2428 /*
2429 * This is a REX prefix in 64-bit mode.
2430 */
2431 if (IEM_IS_64BIT_CODE(pVCpu))
2432 {
2433 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
2434 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
2435 pVCpu->iem.s.uRexReg = 1 << 3;
2436 pVCpu->iem.s.uRexIndex = 1 << 3;
2437
2438 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2439 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2440 }
2441
2442 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
2443 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
2444}
2445
2446
2447/**
2448 * @opcode 0x47
2449 * @opflclass incdec
2450 */
2451FNIEMOP_DEF(iemOp_inc_eDI)
2452{
2453 /*
2454 * This is a REX prefix in 64-bit mode.
2455 */
2456 if (IEM_IS_64BIT_CODE(pVCpu))
2457 {
2458 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
2459 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2460 pVCpu->iem.s.uRexReg = 1 << 3;
2461 pVCpu->iem.s.uRexB = 1 << 3;
2462 pVCpu->iem.s.uRexIndex = 1 << 3;
2463
2464 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2465 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2466 }
2467
2468 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
2469 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2470}
2471
2472
2473/**
2474 * @opcode 0x48
2475 * @opflclass incdec
2476 */
2477FNIEMOP_DEF(iemOp_dec_eAX)
2478{
2479 /*
2480 * This is a REX prefix in 64-bit mode.
2481 */
2482 if (IEM_IS_64BIT_CODE(pVCpu))
2483 {
2484 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2485 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2486 iemRecalEffOpSize(pVCpu);
2487
2488 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2489 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2490 }
2491
2492 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2493 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2494}
2495
2496
2497/**
2498 * @opcode 0x49
2499 * @opflclass incdec
2500 */
2501FNIEMOP_DEF(iemOp_dec_eCX)
2502{
2503 /*
2504 * This is a REX prefix in 64-bit mode.
2505 */
2506 if (IEM_IS_64BIT_CODE(pVCpu))
2507 {
2508 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2509 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2510 pVCpu->iem.s.uRexB = 1 << 3;
2511 iemRecalEffOpSize(pVCpu);
2512
2513 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2514 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2515 }
2516
2517 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2518 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2519}
2520
2521
2522/**
2523 * @opcode 0x4a
2524 * @opflclass incdec
2525 */
2526FNIEMOP_DEF(iemOp_dec_eDX)
2527{
2528 /*
2529 * This is a REX prefix in 64-bit mode.
2530 */
2531 if (IEM_IS_64BIT_CODE(pVCpu))
2532 {
2533 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2534 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2535 pVCpu->iem.s.uRexIndex = 1 << 3;
2536 iemRecalEffOpSize(pVCpu);
2537
2538 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2539 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2540 }
2541
2542 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2543 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2544}
2545
2546
2547/**
2548 * @opcode 0x4b
2549 * @opflclass incdec
2550 */
2551FNIEMOP_DEF(iemOp_dec_eBX)
2552{
2553 /*
2554 * This is a REX prefix in 64-bit mode.
2555 */
2556 if (IEM_IS_64BIT_CODE(pVCpu))
2557 {
2558 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2559 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2560 pVCpu->iem.s.uRexB = 1 << 3;
2561 pVCpu->iem.s.uRexIndex = 1 << 3;
2562 iemRecalEffOpSize(pVCpu);
2563
2564 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2565 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2566 }
2567
2568 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2569 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2570}
2571
2572
2573/**
2574 * @opcode 0x4c
2575 * @opflclass incdec
2576 */
2577FNIEMOP_DEF(iemOp_dec_eSP)
2578{
2579 /*
2580 * This is a REX prefix in 64-bit mode.
2581 */
2582 if (IEM_IS_64BIT_CODE(pVCpu))
2583 {
2584 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2585 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2586 pVCpu->iem.s.uRexReg = 1 << 3;
2587 iemRecalEffOpSize(pVCpu);
2588
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2591 }
2592
2593 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2594 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2595}
2596
2597
2598/**
2599 * @opcode 0x4d
2600 * @opflclass incdec
2601 */
2602FNIEMOP_DEF(iemOp_dec_eBP)
2603{
2604 /*
2605 * This is a REX prefix in 64-bit mode.
2606 */
2607 if (IEM_IS_64BIT_CODE(pVCpu))
2608 {
2609 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2610 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2611 pVCpu->iem.s.uRexReg = 1 << 3;
2612 pVCpu->iem.s.uRexB = 1 << 3;
2613 iemRecalEffOpSize(pVCpu);
2614
2615 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2616 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2617 }
2618
2619 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2620 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2621}
2622
2623
2624/**
2625 * @opcode 0x4e
2626 * @opflclass incdec
2627 */
2628FNIEMOP_DEF(iemOp_dec_eSI)
2629{
2630 /*
2631 * This is a REX prefix in 64-bit mode.
2632 */
2633 if (IEM_IS_64BIT_CODE(pVCpu))
2634 {
2635 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2636 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2637 pVCpu->iem.s.uRexReg = 1 << 3;
2638 pVCpu->iem.s.uRexIndex = 1 << 3;
2639 iemRecalEffOpSize(pVCpu);
2640
2641 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2642 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2643 }
2644
2645 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2646 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2647}
2648
2649
2650/**
2651 * @opcode 0x4f
2652 * @opflclass incdec
2653 */
2654FNIEMOP_DEF(iemOp_dec_eDI)
2655{
2656 /*
2657 * This is a REX prefix in 64-bit mode.
2658 */
2659 if (IEM_IS_64BIT_CODE(pVCpu))
2660 {
2661 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2662 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2663 pVCpu->iem.s.uRexReg = 1 << 3;
2664 pVCpu->iem.s.uRexB = 1 << 3;
2665 pVCpu->iem.s.uRexIndex = 1 << 3;
2666 iemRecalEffOpSize(pVCpu);
2667
2668 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2669 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2670 }
2671
2672 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2673 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2674}
2675
2676
2677/**
2678 * Common 'push register' helper.
2679 */
2680FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2681{
2682 if (IEM_IS_64BIT_CODE(pVCpu))
2683 {
2684 iReg |= pVCpu->iem.s.uRexB;
2685 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2686 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2687 }
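    /* Thus pushes default to 64-bit operands in long mode and 0x66 selects
       16-bit ones; there is no way to encode a 32-bit push in 64-bit code. */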
2688
2689 switch (pVCpu->iem.s.enmEffOpSize)
2690 {
2691 case IEMMODE_16BIT:
2692 IEM_MC_BEGIN(0, 0);
2693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2694 IEM_MC_LOCAL(uint16_t, u16Value);
2695 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2696 IEM_MC_PUSH_U16(u16Value);
2697 IEM_MC_ADVANCE_RIP_AND_FINISH();
2698 IEM_MC_END();
2699 break;
2700
2701 case IEMMODE_32BIT:
2702 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2704 IEM_MC_LOCAL(uint32_t, u32Value);
2705 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2706 IEM_MC_PUSH_U32(u32Value);
2707 IEM_MC_ADVANCE_RIP_AND_FINISH();
2708 IEM_MC_END();
2709 break;
2710
2711 case IEMMODE_64BIT:
2712 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2714 IEM_MC_LOCAL(uint64_t, u64Value);
2715 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2716 IEM_MC_PUSH_U64(u64Value);
2717 IEM_MC_ADVANCE_RIP_AND_FINISH();
2718 IEM_MC_END();
2719 break;
2720
2721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2722 }
2723}
2724
2725
2726/**
2727 * @opcode 0x50
2728 */
2729FNIEMOP_DEF(iemOp_push_eAX)
2730{
2731 IEMOP_MNEMONIC(push_rAX, "push rAX");
2732 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2733}
2734
2735
2736/**
2737 * @opcode 0x51
2738 */
2739FNIEMOP_DEF(iemOp_push_eCX)
2740{
2741 IEMOP_MNEMONIC(push_rCX, "push rCX");
2742 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2743}
2744
2745
2746/**
2747 * @opcode 0x52
2748 */
2749FNIEMOP_DEF(iemOp_push_eDX)
2750{
2751 IEMOP_MNEMONIC(push_rDX, "push rDX");
2752 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2753}
2754
2755
2756/**
2757 * @opcode 0x53
2758 */
2759FNIEMOP_DEF(iemOp_push_eBX)
2760{
2761 IEMOP_MNEMONIC(push_rBX, "push rBX");
2762 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2763}
2764
2765
2766/**
2767 * @opcode 0x54
2768 */
2769FNIEMOP_DEF(iemOp_push_eSP)
2770{
2771 IEMOP_MNEMONIC(push_rSP, "push rSP");
2772 if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
2773 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2774
2775 /* 8086 works differently wrt 'push sp' compared to 80186 and later. */
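    /* E.g. with SP=0008h the 8086 stores 0006h (the post-decrement value) on
       the stack, while 80186 and later store the original 0008h - hence the
       explicit subtract below. */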
2776 IEM_MC_BEGIN(IEM_MC_F_ONLY_8086, 0);
2777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2778 IEM_MC_LOCAL(uint16_t, u16Value);
2779 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2780 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2781 IEM_MC_PUSH_U16(u16Value);
2782 IEM_MC_ADVANCE_RIP_AND_FINISH();
2783 IEM_MC_END();
2784}
2785
2786
2787/**
2788 * @opcode 0x55
2789 */
2790FNIEMOP_DEF(iemOp_push_eBP)
2791{
2792 IEMOP_MNEMONIC(push_rBP, "push rBP");
2793 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2794}
2795
2796
2797/**
2798 * @opcode 0x56
2799 */
2800FNIEMOP_DEF(iemOp_push_eSI)
2801{
2802 IEMOP_MNEMONIC(push_rSI, "push rSI");
2803 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2804}
2805
2806
2807/**
2808 * @opcode 0x57
2809 */
2810FNIEMOP_DEF(iemOp_push_eDI)
2811{
2812 IEMOP_MNEMONIC(push_rDI, "push rDI");
2813 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2814}
2815
2816
2817/**
2818 * Common 'pop register' helper.
2819 */
2820FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2821{
2822 if (IEM_IS_64BIT_CODE(pVCpu))
2823 {
2824 iReg |= pVCpu->iem.s.uRexB;
2825 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2826 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2827 }
2828
2829 switch (pVCpu->iem.s.enmEffOpSize)
2830 {
2831 case IEMMODE_16BIT:
2832 IEM_MC_BEGIN(0, 0);
2833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2834 IEM_MC_POP_GREG_U16(iReg);
2835 IEM_MC_ADVANCE_RIP_AND_FINISH();
2836 IEM_MC_END();
2837 break;
2838
2839 case IEMMODE_32BIT:
2840 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2842 IEM_MC_POP_GREG_U32(iReg);
2843 IEM_MC_ADVANCE_RIP_AND_FINISH();
2844 IEM_MC_END();
2845 break;
2846
2847 case IEMMODE_64BIT:
2848 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2850 IEM_MC_POP_GREG_U64(iReg);
2851 IEM_MC_ADVANCE_RIP_AND_FINISH();
2852 IEM_MC_END();
2853 break;
2854
2855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2856 }
2857}
2858
2859
2860/**
2861 * @opcode 0x58
2862 */
2863FNIEMOP_DEF(iemOp_pop_eAX)
2864{
2865 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2866 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2867}
2868
2869
2870/**
2871 * @opcode 0x59
2872 */
2873FNIEMOP_DEF(iemOp_pop_eCX)
2874{
2875 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2876 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2877}
2878
2879
2880/**
2881 * @opcode 0x5a
2882 */
2883FNIEMOP_DEF(iemOp_pop_eDX)
2884{
2885 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2886 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2887}
2888
2889
2890/**
2891 * @opcode 0x5b
2892 */
2893FNIEMOP_DEF(iemOp_pop_eBX)
2894{
2895 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2896 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2897}
2898
2899
2900/**
2901 * @opcode 0x5c
2902 */
2903FNIEMOP_DEF(iemOp_pop_eSP)
2904{
2905 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2906 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2907}
2908
2909
2910/**
2911 * @opcode 0x5d
2912 */
2913FNIEMOP_DEF(iemOp_pop_eBP)
2914{
2915 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2916 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2917}
2918
2919
2920/**
2921 * @opcode 0x5e
2922 */
2923FNIEMOP_DEF(iemOp_pop_eSI)
2924{
2925 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2926 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2927}
2928
2929
2930/**
2931 * @opcode 0x5f
2932 */
2933FNIEMOP_DEF(iemOp_pop_eDI)
2934{
2935 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2936 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2937}
2938
2939
2940/**
2941 * @opcode 0x60
2942 */
2943FNIEMOP_DEF(iemOp_pusha)
2944{
2945 IEMOP_MNEMONIC(pusha, "pusha");
2946 IEMOP_HLP_MIN_186();
2947 IEMOP_HLP_NO_64BIT();
2948 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2949 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2950 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2951 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2952}
2953
2954
2955/**
2956 * @opcode 0x61
2957 */
2958FNIEMOP_DEF(iemOp_popa__mvex)
2959{
2960 if (!IEM_IS_64BIT_CODE(pVCpu))
2961 {
2962 IEMOP_MNEMONIC(popa, "popa");
2963 IEMOP_HLP_MIN_186();
2964 IEMOP_HLP_NO_64BIT();
2965 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2966 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2967 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2968 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2969 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2970 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2971 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2972 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2973 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2975 iemCImpl_popa_16);
2976 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2977 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2978 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2979 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2980 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2981 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2982 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2983 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2984 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2985 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2986 iemCImpl_popa_32);
2987 }
2988 IEMOP_MNEMONIC(mvex, "mvex");
2989 Log(("mvex prefix is not supported!\n"));
2990 IEMOP_RAISE_INVALID_OPCODE_RET();
2991}
2992
2993
2994/**
2995 * @opcode 0x62
2996 * @opmnemonic bound
2997 * @op1 Gv_RO
2998 * @op2 Ma
2999 * @opmincpu 80186
3000 * @ophints harmless x86_invalid_64
3001 * @optest op1=0 op2=0 ->
3002 * @optest op1=1 op2=0 -> value.xcpt=5
3003 * @optest o16 / op1=0xffff op2=0x0000fffe ->
3004 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
3005 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
3006 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
3007 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
3008 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
3009 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
3010 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
3011 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
3012 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
3013 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
3014 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
3015 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
3016 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
3017 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
3018 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
3019 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
3020 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
3021 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
3022 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
3023 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
3024 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
3025 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
3026 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
3027 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
3028 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
3029 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
3030 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
3031 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
3032 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
3033 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
3034 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
3035 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
3036 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
3037 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
3038 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
3039 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
3040 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
3041 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
3042 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
3043 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
3044 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
3045 */
3046FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
3047{
3048 /* The BOUND instruction is invalid in 64-bit mode. In legacy and
3049 compatibility mode it is invalid with MOD=3.
3050
3051 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
3052 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
3053 given as R and X without an exact description, so we assume it builds on
3054 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
3055 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
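    /* For example, 'bound eax, [ebx]' encodes as 62 03 (mod=0, reg=0=eax,
       rm=3=[ebx]) and takes the BOUND path below, while a mod=3 byte like
       62 C0 can only start an EVEX prefix (AVX-512). */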
3056 uint8_t bRm;
3057 if (!IEM_IS_64BIT_CODE(pVCpu))
3058 {
3059 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3060 IEMOP_HLP_MIN_186();
3061 IEM_OPCODE_GET_NEXT_U8(&bRm);
3062 if (IEM_IS_MODRM_MEM_MODE(bRm))
3063 {
3064 /** @todo testcase: check that there are two memory accesses involved. Check
3065 * whether they're both read before the \#BR triggers. */
3066 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3067 {
3068 IEM_MC_BEGIN(IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
3069 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
3070 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
3071 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
3072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3073
3074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076
3077 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
3078 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3079 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
3080
3081 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
3082 IEM_MC_END();
3083 }
3084 else /* 32-bit operands */
3085 {
3086 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3087 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
3088 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
3089 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
3090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3091
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094
3095 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
3096 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3097 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
3098
3099 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
3100 IEM_MC_END();
3101 }
3102 }
3103
3104 /*
3105 * @opdone
3106 */
3107 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
3108 {
3109 /* Note that there is no need for the CPU to fetch further bytes
3110 here because MODRM.MOD == 3. */
3111 Log(("evex not supported by the guest CPU!\n"));
3112 IEMOP_RAISE_INVALID_OPCODE_RET();
3113 }
3114 }
3115 else
3116 {
3117 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
3118 * does modr/m read, whereas AMD probably doesn't... */
3119 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
3120 {
3121 Log(("evex not supported by the guest CPU!\n"));
3122 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
3123 }
3124 IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 }
3126
3127 IEMOP_MNEMONIC(evex, "evex");
3128 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
3129 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
3130 Log(("evex prefix is not implemented!\n"));
3131 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3132}
3133
3134
3135/**
3136 * @opcode 0x63
3137 * @opflmodify zf
3138 * @note non-64-bit modes.
3139 */
3140FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
3141{
3142 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
3143 IEMOP_HLP_MIN_286();
3144 IEMOP_HLP_NO_REAL_OR_V86_MODE();
3145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3146
3147 if (IEM_IS_MODRM_REG_MODE(bRm))
3148 {
3149 /* Register */
3150 IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
3151 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
3152 IEM_MC_ARG(uint16_t, u16Src, 2);
3153 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
3154 IEM_MC_ARG(uint16_t *, pu16Dst, 1);
3155 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
3156 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3157 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_arpl, fEFlagsIn, pu16Dst, u16Src);
3158 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3159
3160 IEM_MC_ADVANCE_RIP_AND_FINISH();
3161 IEM_MC_END();
3162 }
3163 else
3164 {
3165 /* Memory */
3166 IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
3167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3169 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
3170
3171 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
3172 IEM_MC_ARG(uint16_t *, pu16Dst, 1);
3173 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3174 IEM_MC_ARG(uint16_t, u16Src, 2);
3175 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
3176 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3177 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_arpl, fEFlagsIn, pu16Dst, u16Src);
3178
3179 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
3180 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3181 IEM_MC_ADVANCE_RIP_AND_FINISH();
3182 IEM_MC_END();
3183 }
3184}
3185
3186
3187/**
3188 * @opcode 0x63
3189 *
3190 * @note This is a weird one. It works like a regular move instruction if
3191 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
3192 * @todo This definitely needs a testcase to verify the odd cases. */
3193FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
3194{
3195 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
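    /* With REX.W this is the 64-bit sign extending move: e.g. 48 63 C3 is
       'movsxd rax, ebx' and sign-extends EBX into RAX.  Without REX.W it acts
       as a plain 32-bit move according to the AMD docs, which is the
       unimplemented branch at the bottom. */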
3196
3197 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
3198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3199
3200 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3201 {
3202 if (IEM_IS_MODRM_REG_MODE(bRm))
3203 {
3204 /*
3205 * Register to register.
3206 */
3207 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3209 IEM_MC_LOCAL(uint64_t, u64Value);
3210 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3211 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
3212 IEM_MC_ADVANCE_RIP_AND_FINISH();
3213 IEM_MC_END();
3214 }
3215 else
3216 {
3217 /*
3218 * We're loading a register from memory.
3219 */
3220 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3221 IEM_MC_LOCAL(uint64_t, u64Value);
3222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3225 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3226 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
3227 IEM_MC_ADVANCE_RIP_AND_FINISH();
3228 IEM_MC_END();
3229 }
3230 }
3231 else
3232 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
3233}
3234
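/*
 * Illustration (assuming standard encodings): with REX.W, 48 63 c1 is
 * movsxd rax, ecx, i.e. the register form above performs
 *     dst64 = (int64_t)(int32_t)src32;
 * Without REX.W the instruction degenerates into a plain 32-bit move per
 * the AMD note above; that case is still guarded by the AssertFailedReturn.
 */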
3235
3236/**
3237 * @opcode 0x64
3238 * @opmnemonic segfs
3239 * @opmincpu 80386
3240 * @opgroup og_prefixes
3241 */
3242FNIEMOP_DEF(iemOp_seg_FS)
3243{
3244 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
3245 IEMOP_HLP_MIN_386();
3246
3247 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
3248 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
3249
3250 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3251 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3252}
3253
3254
3255/**
3256 * @opcode 0x65
3257 * @opmnemonic seggs
3258 * @opmincpu 80386
3259 * @opgroup og_prefixes
3260 */
3261FNIEMOP_DEF(iemOp_seg_GS)
3262{
3263 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
3264 IEMOP_HLP_MIN_386();
3265
3266 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
3267 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
3268
3269 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3270 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3271}
3272
3273
3274/**
3275 * @opcode 0x66
3276 * @opmnemonic opsize
3277 * @openc prefix
3278 * @opmincpu 80386
3279 * @ophints harmless
3280 * @opgroup og_prefixes
3281 */
3282FNIEMOP_DEF(iemOp_op_size)
3283{
3284 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
3285 IEMOP_HLP_MIN_386();
3286
3287 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
3288 iemRecalEffOpSize(pVCpu);
3289
3290    /* For the 4 entry opcode tables, the operand prefix doesn't count
3291       when REPZ or REPNZ are present. */
3292 if (pVCpu->iem.s.idxPrefix == 0)
3293 pVCpu->iem.s.idxPrefix = 1;
3294
3295 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3296 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3297}
3298
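/*
 * Example of the rule above, assuming the usual 4-entry table layout
 * (0 = none, 1 = 66h, 2 = F3h, 3 = F2h as set by the REPZ/REPNZ decoders):
 * for 'F3 66 0F 10 /r' the F3 byte claims idxPrefix = 2 first, so the 66h
 * byte seen afterwards leaves it untouched and the F3 (movss) column is
 * selected rather than the 66 (movupd) one.
 */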
3299
3300/**
3301 * @opcode 0x67
3302 * @opmnemonic addrsize
3303 * @openc prefix
3304 * @opmincpu 80386
3305 * @ophints harmless
3306 * @opgroup og_prefixes
3307 */
3308FNIEMOP_DEF(iemOp_addr_size)
3309{
3310 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
3311 IEMOP_HLP_MIN_386();
3312
3313 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
3314 switch (pVCpu->iem.s.enmDefAddrMode)
3315 {
3316 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
3317 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
3318 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
3319 default: AssertFailed();
3320 }
3321
3322 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3323 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3324}
3325
3326
3327/**
3328 * @opcode 0x68
3329 */
3330FNIEMOP_DEF(iemOp_push_Iz)
3331{
3332 IEMOP_MNEMONIC(push_Iz, "push Iz");
3333 IEMOP_HLP_MIN_186();
3334 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3335 switch (pVCpu->iem.s.enmEffOpSize)
3336 {
3337 case IEMMODE_16BIT:
3338 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3339 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3341 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
3342 IEM_MC_PUSH_U16(u16Value);
3343 IEM_MC_ADVANCE_RIP_AND_FINISH();
3344 IEM_MC_END();
3345 break;
3346
3347 case IEMMODE_32BIT:
3348 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3349 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
3352 IEM_MC_PUSH_U32(u32Value);
3353 IEM_MC_ADVANCE_RIP_AND_FINISH();
3354 IEM_MC_END();
3355 break;
3356
3357 case IEMMODE_64BIT:
3358 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3359 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3361 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
3362 IEM_MC_PUSH_U64(u64Value);
3363 IEM_MC_ADVANCE_RIP_AND_FINISH();
3364 IEM_MC_END();
3365 break;
3366
3367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3368 }
3369}
3370
3371
3372/**
3373 * @opcode 0x69
3374 * @opflclass multiply
3375 */
3376FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3377{
3378 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3379 IEMOP_HLP_MIN_186();
3380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3381 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3382
3383 switch (pVCpu->iem.s.enmEffOpSize)
3384 {
3385 case IEMMODE_16BIT:
3386 {
3387 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3388 if (IEM_IS_MODRM_REG_MODE(bRm))
3389 {
3390 /* register operand */
3391 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3392 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3394 IEM_MC_LOCAL(uint16_t, u16Tmp);
3395 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3396
3397 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3398 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3399 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2);
3400 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3401 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3402 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3403
3404 IEM_MC_ADVANCE_RIP_AND_FINISH();
3405 IEM_MC_END();
3406 }
3407 else
3408 {
3409 /* memory operand */
3410 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3413
3414 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3416
3417 IEM_MC_LOCAL(uint16_t, u16Tmp);
3418 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3419
3420 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3421 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3422 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
3423 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3424 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3425 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3426
3427 IEM_MC_ADVANCE_RIP_AND_FINISH();
3428 IEM_MC_END();
3429 }
3430 break;
3431 }
3432
3433 case IEMMODE_32BIT:
3434 {
3435 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3436 if (IEM_IS_MODRM_REG_MODE(bRm))
3437 {
3438 /* register operand */
3439 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3440 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 IEM_MC_LOCAL(uint32_t, u32Tmp);
3443 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3444
3445 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3446 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3447 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2);
3448 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3449 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3450 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3451
3452 IEM_MC_ADVANCE_RIP_AND_FINISH();
3453 IEM_MC_END();
3454 }
3455 else
3456 {
3457 /* memory operand */
3458 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3461
3462 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464
3465 IEM_MC_LOCAL(uint32_t, u32Tmp);
3466 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3467
3468 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3469 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3470 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
3471 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3472 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3473 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3474
3475 IEM_MC_ADVANCE_RIP_AND_FINISH();
3476 IEM_MC_END();
3477 }
3478 break;
3479 }
3480
3481 case IEMMODE_64BIT:
3482 {
3483 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3484 if (IEM_IS_MODRM_REG_MODE(bRm))
3485 {
3486 /* register operand */
3487 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3488 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3492
3493 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3494 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3495 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2);
3496 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3497 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3498 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3499
3500 IEM_MC_ADVANCE_RIP_AND_FINISH();
3501 IEM_MC_END();
3502 }
3503 else
3504 {
3505 /* memory operand */
3506 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3509
3510 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3512
3513 IEM_MC_LOCAL(uint64_t, u64Tmp);
3514 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3515
3516 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3517 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3518 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 2);
3519 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3520 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3521 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3522
3523 IEM_MC_ADVANCE_RIP_AND_FINISH();
3524 IEM_MC_END();
3525 }
3526 break;
3527 }
3528
3529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3530 }
3531}
3532
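/*
 * Worked example for the 16-bit case above: imul ax, bx, 0x10 with
 * bx=0x1234 yields the full signed product 0x12340; the truncated result
 * 0x2340 is written to ax and, since it no longer equals the full product,
 * both CF and OF are set (SF/ZF/AF/PF are left undefined, matching the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS mask above).
 */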
3533
3534/**
3535 * @opcode 0x6a
3536 */
3537FNIEMOP_DEF(iemOp_push_Ib)
3538{
3539 IEMOP_MNEMONIC(push_Ib, "push Ib");
3540 IEMOP_HLP_MIN_186();
3541 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3542 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3543
3544 switch (pVCpu->iem.s.enmEffOpSize)
3545 {
3546 case IEMMODE_16BIT:
3547 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3550 IEM_MC_PUSH_U16(uValue);
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 break;
3554 case IEMMODE_32BIT:
3555 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3557 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3558 IEM_MC_PUSH_U32(uValue);
3559 IEM_MC_ADVANCE_RIP_AND_FINISH();
3560 IEM_MC_END();
3561 break;
3562 case IEMMODE_64BIT:
3563 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3565 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3566 IEM_MC_PUSH_U64(uValue);
3567 IEM_MC_ADVANCE_RIP_AND_FINISH();
3568 IEM_MC_END();
3569 break;
3570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3571 }
3572}
3573
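/*
 * Illustration of the sign extension above: '6a ff' (push -1) stores
 * 0xffff, 0xffffffff or 0xffffffffffffffff depending on the effective
 * operand size, i.e. the imm8 is sign-extended before being pushed:
 *     uValue = (uint64_t)(int64_t)i8Imm;   // 64-bit case, sketch.
 */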
3574
3575/**
3576 * @opcode 0x6b
3577 * @opflclass multiply
3578 */
3579FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3580{
3581    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
3582 IEMOP_HLP_MIN_186();
3583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3585
3586 switch (pVCpu->iem.s.enmEffOpSize)
3587 {
3588 case IEMMODE_16BIT:
3589 {
3590 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3591 if (IEM_IS_MODRM_REG_MODE(bRm))
3592 {
3593 /* register operand */
3594 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3595 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3597
3598 IEM_MC_LOCAL(uint16_t, u16Tmp);
3599 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3600
3601 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3602 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3603 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 2);
3604 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3605 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3606 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3607
3608 IEM_MC_ADVANCE_RIP_AND_FINISH();
3609 IEM_MC_END();
3610 }
3611 else
3612 {
3613 /* memory operand */
3614 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3615
3616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3618
3619 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3621
3622 IEM_MC_LOCAL(uint16_t, u16Tmp);
3623 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3624
3625 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3626 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3627 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
3628 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3629 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3630 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635 break;
3636 }
3637
3638 case IEMMODE_32BIT:
3639 {
3640 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3641 if (IEM_IS_MODRM_REG_MODE(bRm))
3642 {
3643 /* register operand */
3644 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3645 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3647 IEM_MC_LOCAL(uint32_t, u32Tmp);
3648 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3649
3650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3651 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3652 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 2);
3653 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3654 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3655 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3656
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 /* memory operand */
3663 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3666
3667 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 IEM_MC_LOCAL(uint32_t, u32Tmp);
3671 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3672
3673 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3674 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3675 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
3676 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3677 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3678 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3679
3680 IEM_MC_ADVANCE_RIP_AND_FINISH();
3681 IEM_MC_END();
3682 }
3683 break;
3684 }
3685
3686 case IEMMODE_64BIT:
3687 {
3688 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3689 if (IEM_IS_MODRM_REG_MODE(bRm))
3690 {
3691 /* register operand */
3692 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3693 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_LOCAL(uint64_t, u64Tmp);
3696 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3697
3698 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3699 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3700 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3701 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3702 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3703 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3704
3705 IEM_MC_ADVANCE_RIP_AND_FINISH();
3706 IEM_MC_END();
3707 }
3708 else
3709 {
3710 /* memory operand */
3711 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3714
3715 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_LOCAL(uint64_t, u64Tmp);
3719 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3720
3721 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3722 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3723 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3724 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3725 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3726 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3727
3728 IEM_MC_ADVANCE_RIP_AND_FINISH();
3729 IEM_MC_END();
3730 }
3731 break;
3732 }
3733
3734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3735 }
3736}
3737
3738
3739/**
3740 * @opcode 0x6c
3741 * @opfltest iopl,df
3742 */
3743FNIEMOP_DEF(iemOp_insb_Yb_DX)
3744{
3745 IEMOP_HLP_MIN_186();
3746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3747 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3748 {
3749 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3750 switch (pVCpu->iem.s.enmEffAddrMode)
3751 {
3752 case IEMMODE_16BIT:
3753 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3756 iemCImpl_rep_ins_op8_addr16, false);
3757 case IEMMODE_32BIT:
3758 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3759 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3760 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3761 iemCImpl_rep_ins_op8_addr32, false);
3762 case IEMMODE_64BIT:
3763 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3764 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3766 iemCImpl_rep_ins_op8_addr64, false);
3767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3768 }
3769 }
3770 else
3771 {
3772 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3773 switch (pVCpu->iem.s.enmEffAddrMode)
3774 {
3775 case IEMMODE_16BIT:
3776 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3777 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3778 iemCImpl_ins_op8_addr16, false);
3779 case IEMMODE_32BIT:
3780 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3781 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3782 iemCImpl_ins_op8_addr32, false);
3783 case IEMMODE_64BIT:
3784 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3785 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3786 iemCImpl_ins_op8_addr64, false);
3787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3788 }
3789 }
3790}
3791
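/*
 * Note on the second IEM_MC_DEFER_TO_CIMPL_1_RET argument above: it is a
 * mask of guest registers the C implementation may modify, presumably so
 * the native recompiler can flush its shadow copies.  INS advances xDI,
 * and the REP forms additionally decrement xCX, hence:
 *
 *     RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)     // both forms
 *   | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)     // rep form only
 */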
3792
3793/**
3794 * @opcode 0x6d
3795 * @opfltest iopl,df
3796 */
3797FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3798{
3799 IEMOP_HLP_MIN_186();
3800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3801 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3802 {
3803 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3804 switch (pVCpu->iem.s.enmEffOpSize)
3805 {
3806 case IEMMODE_16BIT:
3807 switch (pVCpu->iem.s.enmEffAddrMode)
3808 {
3809 case IEMMODE_16BIT:
3810 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3811 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3812 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3813 iemCImpl_rep_ins_op16_addr16, false);
3814 case IEMMODE_32BIT:
3815 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3816 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3817 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3818 iemCImpl_rep_ins_op16_addr32, false);
3819 case IEMMODE_64BIT:
3820 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3821 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3822 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3823 iemCImpl_rep_ins_op16_addr64, false);
3824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3825 }
3826 break;
3827 case IEMMODE_64BIT:
3828 case IEMMODE_32BIT:
3829 switch (pVCpu->iem.s.enmEffAddrMode)
3830 {
3831 case IEMMODE_16BIT:
3832 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3833 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3835 iemCImpl_rep_ins_op32_addr16, false);
3836 case IEMMODE_32BIT:
3837 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3840 iemCImpl_rep_ins_op32_addr32, false);
3841 case IEMMODE_64BIT:
3842 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3843 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3845 iemCImpl_rep_ins_op32_addr64, false);
3846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3847 }
3848 break;
3849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3850 }
3851 }
3852 else
3853 {
3854 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3855 switch (pVCpu->iem.s.enmEffOpSize)
3856 {
3857 case IEMMODE_16BIT:
3858 switch (pVCpu->iem.s.enmEffAddrMode)
3859 {
3860 case IEMMODE_16BIT:
3861 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3862 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3863 iemCImpl_ins_op16_addr16, false);
3864 case IEMMODE_32BIT:
3865 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3866 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3867 iemCImpl_ins_op16_addr32, false);
3868 case IEMMODE_64BIT:
3869 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3870 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3871 iemCImpl_ins_op16_addr64, false);
3872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3873 }
3874 break;
3875 case IEMMODE_64BIT:
3876 case IEMMODE_32BIT:
3877 switch (pVCpu->iem.s.enmEffAddrMode)
3878 {
3879 case IEMMODE_16BIT:
3880 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3881 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3882 iemCImpl_ins_op32_addr16, false);
3883 case IEMMODE_32BIT:
3884 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3885 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3886 iemCImpl_ins_op32_addr32, false);
3887 case IEMMODE_64BIT:
3888 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3889 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3890 iemCImpl_ins_op32_addr64, false);
3891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3892 }
3893 break;
3894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3895 }
3896 }
3897}
3898
3899
3900/**
3901 * @opcode 0x6e
3902 * @opfltest iopl,df
3903 */
3904FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3905{
3906 IEMOP_HLP_MIN_186();
3907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3908 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3909 {
3910 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3911 switch (pVCpu->iem.s.enmEffAddrMode)
3912 {
3913 case IEMMODE_16BIT:
3914 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3915 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3916 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3917 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3918 case IEMMODE_32BIT:
3919 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3920 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3921 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3922 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3923 case IEMMODE_64BIT:
3924 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3925 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3926 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3927 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3929 }
3930 }
3931 else
3932 {
3933 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3934 switch (pVCpu->iem.s.enmEffAddrMode)
3935 {
3936 case IEMMODE_16BIT:
3937 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3938 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3939 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3940 case IEMMODE_32BIT:
3941 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3942 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3943 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3944 case IEMMODE_64BIT:
3945 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3946 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3947 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3949 }
3950 }
3951}
3952
3953
3954/**
3955 * @opcode 0x6f
3956 * @opfltest iopl,df
3957 */
3958FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3959{
3960 IEMOP_HLP_MIN_186();
3961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3962 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3963 {
3964 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3965 switch (pVCpu->iem.s.enmEffOpSize)
3966 {
3967 case IEMMODE_16BIT:
3968 switch (pVCpu->iem.s.enmEffAddrMode)
3969 {
3970 case IEMMODE_16BIT:
3971 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3972 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3973 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3974 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3975 case IEMMODE_32BIT:
3976 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3977 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3978 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3979 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3980 case IEMMODE_64BIT:
3981 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3982 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3983 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3984 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3986 }
3987 break;
3988 case IEMMODE_64BIT:
3989 case IEMMODE_32BIT:
3990 switch (pVCpu->iem.s.enmEffAddrMode)
3991 {
3992 case IEMMODE_16BIT:
3993 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3994 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3995 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3996 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3997 case IEMMODE_32BIT:
3998 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3999 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
4000 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
4001 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
4002 case IEMMODE_64BIT:
4003 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4004 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
4005 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
4006 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
4007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4008 }
4009 break;
4010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4011 }
4012 }
4013 else
4014 {
4015 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
4016 switch (pVCpu->iem.s.enmEffOpSize)
4017 {
4018 case IEMMODE_16BIT:
4019 switch (pVCpu->iem.s.enmEffAddrMode)
4020 {
4021 case IEMMODE_16BIT:
4022 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4023 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4024 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
4025 case IEMMODE_32BIT:
4026 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4027 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4028 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
4029 case IEMMODE_64BIT:
4030 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4031 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4032 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
4033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4034 }
4035 break;
4036 case IEMMODE_64BIT:
4037 case IEMMODE_32BIT:
4038 switch (pVCpu->iem.s.enmEffAddrMode)
4039 {
4040 case IEMMODE_16BIT:
4041 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4042 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4043 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
4044 case IEMMODE_32BIT:
4045 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4046 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4047 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
4048 case IEMMODE_64BIT:
4049 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
4050 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
4051 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
4052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4053 }
4054 break;
4055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4056 }
4057 }
4058}
4059
4060
4061/**
4062 * @opcode 0x70
4063 * @opfltest of
4064 */
4065FNIEMOP_DEF(iemOp_jo_Jb)
4066{
4067 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
4068 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4069 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4070
4071 IEM_MC_BEGIN(0, 0);
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4074 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4075 } IEM_MC_ELSE() {
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 } IEM_MC_ENDIF();
4078 IEM_MC_END();
4079}
4080
4081
4082/**
4083 * @opcode 0x71
4084 * @opfltest of
4085 */
4086FNIEMOP_DEF(iemOp_jno_Jb)
4087{
4088 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
4089 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4090 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4091
4092 IEM_MC_BEGIN(0, 0);
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4095 IEM_MC_ADVANCE_RIP_AND_FINISH();
4096 } IEM_MC_ELSE() {
4097 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4098 } IEM_MC_ENDIF();
4099 IEM_MC_END();
4100}
4101
4102/**
4103 * @opcode 0x72
4104 * @opfltest cf
4105 */
4106FNIEMOP_DEF(iemOp_jc_Jb)
4107{
4108 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
4109 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4110 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4111
4112 IEM_MC_BEGIN(0, 0);
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4115 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4116 } IEM_MC_ELSE() {
4117 IEM_MC_ADVANCE_RIP_AND_FINISH();
4118 } IEM_MC_ENDIF();
4119 IEM_MC_END();
4120}
4121
4122
4123/**
4124 * @opcode 0x73
4125 * @opfltest cf
4126 */
4127FNIEMOP_DEF(iemOp_jnc_Jb)
4128{
4129 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
4130 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4131 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4132
4133 IEM_MC_BEGIN(0, 0);
4134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4136 IEM_MC_ADVANCE_RIP_AND_FINISH();
4137 } IEM_MC_ELSE() {
4138 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4139 } IEM_MC_ENDIF();
4140 IEM_MC_END();
4141}
4142
4143
4144/**
4145 * @opcode 0x74
4146 * @opfltest zf
4147 */
4148FNIEMOP_DEF(iemOp_je_Jb)
4149{
4150 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
4151 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4152 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4153
4154 IEM_MC_BEGIN(0, 0);
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4157 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4158 } IEM_MC_ELSE() {
4159 IEM_MC_ADVANCE_RIP_AND_FINISH();
4160 } IEM_MC_ENDIF();
4161 IEM_MC_END();
4162}
4163
4164
4165/**
4166 * @opcode 0x75
4167 * @opfltest zf
4168 */
4169FNIEMOP_DEF(iemOp_jne_Jb)
4170{
4171 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
4172 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4173 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4174
4175 IEM_MC_BEGIN(0, 0);
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4177 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4178 IEM_MC_ADVANCE_RIP_AND_FINISH();
4179 } IEM_MC_ELSE() {
4180 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4181 } IEM_MC_ENDIF();
4182 IEM_MC_END();
4183}
4184
4185
4186/**
4187 * @opcode 0x76
4188 * @opfltest cf,zf
4189 */
4190FNIEMOP_DEF(iemOp_jbe_Jb)
4191{
4192 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
4193 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4194 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4195
4196 IEM_MC_BEGIN(0, 0);
4197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4198 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4199 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4200 } IEM_MC_ELSE() {
4201 IEM_MC_ADVANCE_RIP_AND_FINISH();
4202 } IEM_MC_ENDIF();
4203 IEM_MC_END();
4204}
4205
4206
4207/**
4208 * @opcode 0x77
4209 * @opfltest cf,zf
4210 */
4211FNIEMOP_DEF(iemOp_jnbe_Jb)
4212{
4213 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
4214 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4215 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4216
4217 IEM_MC_BEGIN(0, 0);
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4219 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4220 IEM_MC_ADVANCE_RIP_AND_FINISH();
4221 } IEM_MC_ELSE() {
4222 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4223 } IEM_MC_ENDIF();
4224 IEM_MC_END();
4225}
4226
4227
4228/**
4229 * @opcode 0x78
4230 * @opfltest sf
4231 */
4232FNIEMOP_DEF(iemOp_js_Jb)
4233{
4234 IEMOP_MNEMONIC(js_Jb, "js Jb");
4235 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4236 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4237
4238 IEM_MC_BEGIN(0, 0);
4239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4240 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4241 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4242 } IEM_MC_ELSE() {
4243 IEM_MC_ADVANCE_RIP_AND_FINISH();
4244 } IEM_MC_ENDIF();
4245 IEM_MC_END();
4246}
4247
4248
4249/**
4250 * @opcode 0x79
4251 * @opfltest sf
4252 */
4253FNIEMOP_DEF(iemOp_jns_Jb)
4254{
4255 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
4256 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4258
4259 IEM_MC_BEGIN(0, 0);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4261 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4262 IEM_MC_ADVANCE_RIP_AND_FINISH();
4263 } IEM_MC_ELSE() {
4264 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4265 } IEM_MC_ENDIF();
4266 IEM_MC_END();
4267}
4268
4269
4270/**
4271 * @opcode 0x7a
4272 * @opfltest pf
4273 */
4274FNIEMOP_DEF(iemOp_jp_Jb)
4275{
4276 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
4277 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4278 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4279
4280 IEM_MC_BEGIN(0, 0);
4281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4283 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4284 } IEM_MC_ELSE() {
4285 IEM_MC_ADVANCE_RIP_AND_FINISH();
4286 } IEM_MC_ENDIF();
4287 IEM_MC_END();
4288}
4289
4290
4291/**
4292 * @opcode 0x7b
4293 * @opfltest pf
4294 */
4295FNIEMOP_DEF(iemOp_jnp_Jb)
4296{
4297 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
4298 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4299 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4304 IEM_MC_ADVANCE_RIP_AND_FINISH();
4305 } IEM_MC_ELSE() {
4306 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4307 } IEM_MC_ENDIF();
4308 IEM_MC_END();
4309}
4310
4311
4312/**
4313 * @opcode 0x7c
4314 * @opfltest sf,of
4315 */
4316FNIEMOP_DEF(iemOp_jl_Jb)
4317{
4318 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
4319 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4321
4322 IEM_MC_BEGIN(0, 0);
4323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4324 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4325 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4326 } IEM_MC_ELSE() {
4327 IEM_MC_ADVANCE_RIP_AND_FINISH();
4328 } IEM_MC_ENDIF();
4329 IEM_MC_END();
4330}
4331
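/*
 * For reference, the signed-comparison conditions used by 0x7c..0x7f map
 * onto the EFLAGS tests as follows (standard x86 condition codes):
 *     jl  / jnge : SF != OF             -> IEM_MC_IF_EFL_BITS_NE, jump taken
 *     jnl / jge  : SF == OF             -> same test, jump in the ELSE branch
 *     jle / jng  : ZF || SF != OF       -> IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE
 *     jnle/ jg   : !ZF && SF == OF      -> same test, jump in the ELSE branch
 */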
4332
4333/**
4334 * @opcode 0x7d
4335 * @opfltest sf,of
4336 */
4337FNIEMOP_DEF(iemOp_jnl_Jb)
4338{
4339 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
4340 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4341 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4342
4343 IEM_MC_BEGIN(0, 0);
4344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4345 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4346 IEM_MC_ADVANCE_RIP_AND_FINISH();
4347 } IEM_MC_ELSE() {
4348 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4349 } IEM_MC_ENDIF();
4350 IEM_MC_END();
4351}
4352
4353
4354/**
4355 * @opcode 0x7e
4356 * @opfltest zf,sf,of
4357 */
4358FNIEMOP_DEF(iemOp_jle_Jb)
4359{
4360 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
4361 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4362 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4363
4364 IEM_MC_BEGIN(0, 0);
4365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4366 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4367 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4368 } IEM_MC_ELSE() {
4369 IEM_MC_ADVANCE_RIP_AND_FINISH();
4370 } IEM_MC_ENDIF();
4371 IEM_MC_END();
4372}
4373
4374
4375/**
4376 * @opcode 0x7f
4377 * @opfltest zf,sf,of
4378 */
4379FNIEMOP_DEF(iemOp_jnle_Jb)
4380{
4381 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
4382 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4383 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4384
4385 IEM_MC_BEGIN(0, 0);
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4387 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4388 IEM_MC_ADVANCE_RIP_AND_FINISH();
4389 } IEM_MC_ELSE() {
4390 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4391 } IEM_MC_ENDIF();
4392 IEM_MC_END();
4393}
4394
4395
4396/**
4397 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4398 * iemOp_Grp1_Eb_Ib_80.
4399 */
4400#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4401 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4402 { \
4403 /* register target */ \
4404 IEM_MC_BEGIN(0, 0); \
4405 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4407 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4408 IEM_MC_LOCAL(uint8_t, u8Dst); \
4409 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4410 IEM_MC_LOCAL_EFLAGS( uEFlags); \
4411 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4412 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
4413 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4414 } IEM_MC_NATIVE_ELSE() { \
4415 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
4416 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4417 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
4418 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4419 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
4420 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4421 } IEM_MC_NATIVE_ENDIF(); \
4422 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4423 IEM_MC_END(); \
4424 } \
4425 else \
4426 { \
4427 /* memory target */ \
4428 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4429 { \
4430 IEM_MC_BEGIN(0, 0); \
4431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4433 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4434 IEMOP_HLP_DONE_DECODING(); \
4435 \
4436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4437 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
4438 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4439 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
4440 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4441 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
4442 \
4443 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4444 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4445 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4446 IEM_MC_END(); \
4447 } \
4448 else \
4449 { \
4450 IEM_MC_BEGIN(0, 0); \
4451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4453 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4454 IEMOP_HLP_DONE_DECODING(); \
4455 \
4456 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4457 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
4458 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4459 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
4460 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4461 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
4462 \
4463 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4464 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4465 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4466 IEM_MC_END(); \
4467 } \
4468 } \
4469 (void)0
4470
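/*
 * For instance, IEMOP_BODY_BINARY_Eb_Ib_RW(add, ...) expands the non-native
 * register path above into roughly (sketch, RT_CONCAT3 resolved by hand):
 *
 *     uint32_t fEFlagsRet = iemAImpl_add_u8(fEFlagsIn, pu8Dst, u8Src);
 *
 * i.e. the helper takes EFLAGS by value and returns the updated value,
 * while the locked memory path picks iemAImpl_add_u8_locked together with
 * an atomic mapping instead.
 */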
4471#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
4472 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4473 { \
4474 /* register target */ \
4475 IEM_MC_BEGIN(0, 0); \
4476 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4478 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4479 IEM_MC_LOCAL(uint8_t, u8Dst); \
4480 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4481 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4482 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4483 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4484 } IEM_MC_NATIVE_ELSE() { \
4485 IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
4486 IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4487 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4488 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
4489 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
4490 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4491 } IEM_MC_NATIVE_ENDIF(); \
4492 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4493 IEM_MC_END(); \
4494 } \
4495 else \
4496 { \
4497 /* memory target */ \
4498 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4499 { \
4500 IEM_MC_BEGIN(0, 0); \
4501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4503 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4504 IEMOP_HLP_DONE_DECODING(); \
4505 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4506 IEM_MC_LOCAL(uint8_t, u8Dst); \
4507 IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4508 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4509 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4510 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4511 } IEM_MC_NATIVE_ELSE() { \
4512 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4513 IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
4514 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4515 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4516 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 2); \
4517 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
4518 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4519 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4520 } IEM_MC_NATIVE_ENDIF(); \
4521 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4522 IEM_MC_END(); \
4523 } \
4524 else \
4525 { \
4526 IEMOP_HLP_DONE_DECODING(); \
4527 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4528 } \
4529 } \
4530 (void)0
4531
4532
4533
4534/**
4535 * @opmaps grp1_80,grp1_83
4536 * @opcode /0
4537 * @opflclass arithmetic
4538 */
4539FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4540{
4541 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4542 IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4543}
4544
4545
4546/**
4547 * @opmaps grp1_80,grp1_83
4548 * @opcode /1
4549 * @opflclass logical
4550 */
4551FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4552{
4553 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4554 IEMOP_BODY_BINARY_Eb_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4555}
4556
4557
4558/**
4559 * @opmaps grp1_80,grp1_83
4560 * @opcode /2
4561 * @opflclass arithmetic_carry
4562 */
4563FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4564{
4565 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4566 IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_80,grp1_83
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
4574 */
4575FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4576{
4577 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4578 IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4579}
4580
4581
4582/**
4583 * @opmaps grp1_80,grp1_83
4584 * @opcode /4
4585 * @opflclass logical
4586 */
4587FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4588{
4589 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4590 IEMOP_BODY_BINARY_Eb_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4591}
4592
4593
4594/**
4595 * @opmaps grp1_80,grp1_83
4596 * @opcode /5
4597 * @opflclass arithmetic
4598 */
4599FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4600{
4601 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4602 IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4603}
4604
4605
4606/**
4607 * @opmaps grp1_80,grp1_83
4608 * @opcode /6
4609 * @opflclass logical
4610 */
4611FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4612{
4613 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4614 IEMOP_BODY_BINARY_Eb_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4615}
4616
4617
4618/**
4619 * @opmaps grp1_80,grp1_83
4620 * @opcode /7
4621 * @opflclass arithmetic
4622 */
4623FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4624{
4625 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4626 IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
4627}
4628
4629
4630/**
4631 * @opcode 0x80
4632 */
4633FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4634{
4635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4636 switch (IEM_GET_MODRM_REG_8(bRm))
4637 {
4638 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4639 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4640 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4641 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4642 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4643 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4644 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4645 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4647 }
4648}
4649
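/*
 * Encoding example for the dispatch above: the ModR/M reg field selects the
 * operation (0=add, 1=or, ..., 7=cmp), so '80 e9 05' decodes as mod=3,
 * reg=5, rm=1, i.e. 'sub cl, 5', and lands in iemOp_Grp1_sub_Eb_Ib.
 */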
4650
4651/**
4652 * Body for a group 1 binary operator.
4653 */
4654#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4655 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4656 { \
4657 /* register target */ \
4658 switch (pVCpu->iem.s.enmEffOpSize) \
4659 { \
4660 case IEMMODE_16BIT: \
4661 { \
4662 IEM_MC_BEGIN(0, 0); \
4663 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4665 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4666 IEM_MC_LOCAL(uint16_t, u16Dst); \
4667 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4668 IEM_MC_LOCAL(uint32_t, uEFlags); \
4669 IEM_MC_FETCH_EFLAGS(uEFlags); \
4670 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4671 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
4672 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4673 } IEM_MC_NATIVE_ELSE() { \
4674 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4675 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4676 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4677 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 2); \
4678 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
4679 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4680 } IEM_MC_NATIVE_ENDIF(); \
4681 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4682 IEM_MC_END(); \
4683 break; \
4684 } \
4685 \
4686 case IEMMODE_32BIT: \
4687 { \
4688 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4689 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4691 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4692 IEM_MC_LOCAL(uint32_t, u32Dst); \
4693 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4694 IEM_MC_LOCAL(uint32_t, uEFlags); \
4695 IEM_MC_FETCH_EFLAGS(uEFlags); \
4696 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4697 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
4698 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4699 } IEM_MC_NATIVE_ELSE() { \
4700 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4701 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4702 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4703 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 2); \
4704 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4707 } IEM_MC_NATIVE_ENDIF(); \
4708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4709 IEM_MC_END(); \
4710 break; \
4711 } \
4712 \
4713 case IEMMODE_64BIT: \
4714 { \
4715 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4716 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4718 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4719 IEM_MC_LOCAL(uint64_t, u64Dst); \
4720 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4721 IEM_MC_LOCAL(uint32_t, uEFlags); \
4722 IEM_MC_FETCH_EFLAGS(uEFlags); \
4723 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4724 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
4725 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4726 } IEM_MC_NATIVE_ELSE() { \
4727 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4728 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4729 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4730 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 2); \
4731 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
4732 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4733 } IEM_MC_NATIVE_ENDIF(); \
4734 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4735 IEM_MC_END(); \
4736 break; \
4737 } \
4738 \
4739 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4740 } \
4741 } \
4742 else \
4743 { \
4744 /* memory target */ \
4745 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4746 { \
4747 switch (pVCpu->iem.s.enmEffOpSize) \
4748 { \
4749 case IEMMODE_16BIT: \
4750 { \
4751 IEM_MC_BEGIN(0, 0); \
4752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4754 \
4755 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4756 IEMOP_HLP_DONE_DECODING(); \
4757 \
4758 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4759 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4760 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4761 \
4762 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
4763 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4764 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
4765 \
4766 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4767 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4768 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4769 IEM_MC_END(); \
4770 break; \
4771 } \
4772 \
4773 case IEMMODE_32BIT: \
4774 { \
4775 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4778 \
4779 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4780 IEMOP_HLP_DONE_DECODING(); \
4781 \
4782 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4783 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4784 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4785 \
4786 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
4787 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4788 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
4789 \
4790 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4791 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4792 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4793 IEM_MC_END(); \
4794 break; \
4795 } \
4796 \
4797 case IEMMODE_64BIT: \
4798 { \
4799 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4800 \
4801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4803 \
4804 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4805 IEMOP_HLP_DONE_DECODING(); \
4806 \
4807 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4808 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4809 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4810 \
4811 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
4812 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4813 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
4814 \
4815 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4816 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4818 IEM_MC_END(); \
4819 break; \
4820 } \
4821 \
4822 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4823 } \
4824 } \
4825 else \
4826 { \
4827 switch (pVCpu->iem.s.enmEffOpSize) \
4828 { \
4829 case IEMMODE_16BIT: \
4830 { \
4831 IEM_MC_BEGIN(0, 0); \
4832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4834 \
4835 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4836 IEMOP_HLP_DONE_DECODING(); \
4837 \
4838 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4839 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
4840 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4841 \
4842 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
4843 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4844 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
4845 \
4846 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4847 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4848 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4849 IEM_MC_END(); \
4850 break; \
4851 } \
4852 \
4853 case IEMMODE_32BIT: \
4854 { \
4855 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4858 \
4859 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4860 IEMOP_HLP_DONE_DECODING(); \
4861 \
4862 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4863 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
4864 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4865 \
4866 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
4867 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4868 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
4869 \
4870 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4871 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4872 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4873 IEM_MC_END(); \
4874 break; \
4875 } \
4876 \
4877 case IEMMODE_64BIT: \
4878 { \
4879 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4882 \
4883 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4884 IEMOP_HLP_DONE_DECODING(); \
4885 \
4886 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4887 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
4888 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4889 \
4890 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
4891 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4892 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
4893 \
4894 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4895 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4896 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4897 IEM_MC_END(); \
4898 break; \
4899 } \
4900 \
4901 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4902 } \
4903 } \
4904 } \
4905 (void)0
4906
4907/* read-only version */
4908#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
4909 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4910 { \
4911 /* register target */ \
4912 switch (pVCpu->iem.s.enmEffOpSize) \
4913 { \
4914 case IEMMODE_16BIT: \
4915 { \
4916 IEM_MC_BEGIN(0, 0); \
4917 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4919 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4920 IEM_MC_LOCAL(uint16_t, u16Dst); \
4921 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4922 IEM_MC_LOCAL(uint32_t, uEFlags); \
4923 IEM_MC_FETCH_EFLAGS(uEFlags); \
4924 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4925 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4926 } IEM_MC_NATIVE_ELSE() { \
4927 IEM_MC_ARG(uint16_t const *,pu16Dst, 1); \
4928 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4929 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4930 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 2); \
4931 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
4932 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4933 } IEM_MC_NATIVE_ENDIF(); \
4934 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4935 IEM_MC_END(); \
4936 break; \
4937 } \
4938 \
4939 case IEMMODE_32BIT: \
4940 { \
4941 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4942 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4944 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4945 IEM_MC_LOCAL(uint32_t, u32Dst); \
4946 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4947 IEM_MC_LOCAL(uint32_t, uEFlags); \
4948 IEM_MC_FETCH_EFLAGS(uEFlags); \
4949 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4950 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4951 } IEM_MC_NATIVE_ELSE() { \
4952 IEM_MC_ARG(uint32_t const *,pu32Dst, 1); \
4953 IEM_MC_REF_GREG_U32_CONST (pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4954 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4955 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 2); \
4956 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
4957 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4958 } IEM_MC_NATIVE_ENDIF(); \
4959 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4960 IEM_MC_END(); \
4961 break; \
4962 } \
4963 \
4964 case IEMMODE_64BIT: \
4965 { \
4966 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4967 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4969 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4970 IEM_MC_LOCAL(uint64_t, u64Dst); \
4971 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4972 IEM_MC_LOCAL(uint32_t, uEFlags); \
4973 IEM_MC_FETCH_EFLAGS(uEFlags); \
4974 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4975 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4976 } IEM_MC_NATIVE_ELSE() { \
4977 IEM_MC_ARG(uint64_t const *,pu64Dst, 1); \
4978 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4979 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
4980 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 2); \
4981 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
4982 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
4983 } IEM_MC_NATIVE_ENDIF(); \
4984 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4985 IEM_MC_END(); \
4986 break; \
4987 } \
4988 \
4989 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4990 } \
4991 } \
4992 else \
4993 { \
4994 /* memory target */ \
4995 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4996 { \
4997 switch (pVCpu->iem.s.enmEffOpSize) \
4998 { \
4999 case IEMMODE_16BIT: \
5000 { \
5001 IEM_MC_BEGIN(0, 0); \
5002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
5004 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
5005 IEMOP_HLP_DONE_DECODING(); \
5006 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5007 IEM_MC_LOCAL(uint16_t, u16Dst); \
5008 IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5009 IEM_MC_LOCAL_EFLAGS(uEFlags); \
5010 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
5011 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5012 } IEM_MC_NATIVE_ELSE() { \
5013 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5014 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
5015 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5016 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5017 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2); \
5018 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5019 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5020 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5021 } IEM_MC_NATIVE_ENDIF(); \
5022 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5023 IEM_MC_END(); \
5024 break; \
5025 } \
5026 \
5027 case IEMMODE_32BIT: \
5028 { \
5029 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
5032 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
5033 IEMOP_HLP_DONE_DECODING(); \
5034 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5035 IEM_MC_LOCAL(uint32_t, u32Dst); \
5036 IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5037 IEM_MC_LOCAL_EFLAGS(uEFlags); \
5038 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
5039 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5040 } IEM_MC_NATIVE_ELSE() { \
5041 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5042 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
5043 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5044 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5045 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2); \
5046 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5047 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5048 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5049 } IEM_MC_NATIVE_ENDIF(); \
5050 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5051 IEM_MC_END(); \
5052 break; \
5053 } \
5054 \
5055 case IEMMODE_64BIT: \
5056 { \
5057 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
5060 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
5061 IEMOP_HLP_DONE_DECODING(); \
5062 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5063 IEM_MC_LOCAL(uint64_t, u64Dst); \
5064 IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5065 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5066 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
5067 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5068 } IEM_MC_NATIVE_ELSE() { \
5069 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5070 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
5071 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5072 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5073 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 2); \
5074 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5075 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5076 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5077 } IEM_MC_NATIVE_ENDIF(); \
5078 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5079 IEM_MC_END(); \
5080 break; \
5081 } \
5082 \
5083 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5084 } \
5085 } \
5086 else \
5087 { \
5088 IEMOP_HLP_DONE_DECODING(); \
5089 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
5090 } \
5091 } \
5092 (void)0
5093
5094
5095/**
5096 * @opmaps grp1_81
5097 * @opcode /0
5098 * @opflclass arithmetic
5099 */
5100FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
5101{
5102 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
5103 IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5104}
5105
5106
5107/**
5108 * @opmaps grp1_81
5109 * @opcode /1
5110 * @opflclass logical
5111 */
5112FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
5113{
5114 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
5115 IEMOP_BODY_BINARY_Ev_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5116}
5117
5118
5119/**
5120 * @opmaps grp1_81
5121 * @opcode /2
5122 * @opflclass arithmetic_carry
5123 */
5124FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
5125{
5126 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
5127 IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5128}
5129
5130
5131/**
5132 * @opmaps grp1_81
5133 * @opcode /3
5134 * @opflclass arithmetic_carry
5135 */
5136FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
5137{
5138 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
5139 IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5140}
5141
5142
5143/**
5144 * @opmaps grp1_81
5145 * @opcode /4
5146 * @opflclass logical
5147 */
5148FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
5149{
5150 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
5151 IEMOP_BODY_BINARY_Ev_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5152}
5153
5154
5155/**
5156 * @opmaps grp1_81
5157 * @opcode /5
5158 * @opflclass arithmetic
5159 */
5160FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
5161{
5162 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
5163 IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5164}
5165
5166
5167/**
5168 * @opmaps grp1_81
5169 * @opcode /6
5170 * @opflclass logical
5171 */
5172FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
5173{
5174 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
5175 IEMOP_BODY_BINARY_Ev_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5176}
5177
5178
5179/**
5180 * @opmaps grp1_81
5181 * @opcode /7
5182 * @opflclass arithmetic
5183 */
5184FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
5185{
5186 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
5187 IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5188}
5189
5190
5191/**
5192 * @opcode 0x81
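 * Group 1 Ev,Iz: the ModRM reg field selects the operation (0=ADD ... 7=CMP);
 * e.g. the bytes 81 F9 00 10 00 00 decode as 'cmp ecx, 1000h'.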
5193 */
5194FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
5195{
5196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5197 switch (IEM_GET_MODRM_REG_8(bRm))
5198 {
5199 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
5200 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
5201 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
5202 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
5203 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
5204 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
5205 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
5206 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
5207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5208 }
5209}
5210
5211
5212/**
5213 * @opcode 0x82
5214 * @opmnemonic grp1_82
5215 * @opgroup og_groups
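 * Alias of opcode 0x80 (grp1 Eb,Ib); invalid (raises \#UD) in 64-bit mode.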
5216 */
5217FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
5218{
5219 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
5220 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
5221}
5222
5223
5224/**
5225 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
5226 * iemOp_Grp1_Ev_Ib.
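 *
 * The byte immediate is sign-extended to the effective operand size before the
 * operation, as the (intN_t)(int8_t) casts below show; e.g. the bytes 83 C0 FF
 * decode as 'add eax, 0FFFFFFFFh' (i.e. add eax, -1).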
5227 */
5228#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
5229 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5230 { \
5231 /* \
5232 * Register target \
5233 */ \
5234 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
5235 switch (pVCpu->iem.s.enmEffOpSize) \
5236 { \
5237 case IEMMODE_16BIT: \
5238 IEM_MC_BEGIN(0, 0); \
5239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5240 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5241 IEM_MC_LOCAL(uint16_t, u16Dst); \
5242 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5243 IEM_MC_LOCAL(uint32_t, uEFlags); \
5244 IEM_MC_FETCH_EFLAGS(uEFlags); \
5245 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5246 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
5247 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5248 } IEM_MC_NATIVE_ELSE() { \
5249 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5250 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5251 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5252 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5253 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5254 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5255 } IEM_MC_NATIVE_ENDIF(); \
5256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5257 IEM_MC_END(); \
5258 break; \
5259 \
5260 case IEMMODE_32BIT: \
5261 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5263 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5264 IEM_MC_LOCAL(uint32_t, u32Dst); \
5265 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5266 IEM_MC_LOCAL(uint32_t, uEFlags); \
5267 IEM_MC_FETCH_EFLAGS(uEFlags); \
5268 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5269 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
5270 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5271 } IEM_MC_NATIVE_ELSE() { \
5272 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5273 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5274 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5275 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5276 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5277 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
5278 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5279 } IEM_MC_NATIVE_ENDIF(); \
5280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5281 IEM_MC_END(); \
5282 break; \
5283 \
5284 case IEMMODE_64BIT: \
5285 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5287 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5288 IEM_MC_LOCAL(uint64_t, u64Dst); \
5289 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5290 IEM_MC_LOCAL(uint32_t, uEFlags); \
5291 IEM_MC_FETCH_EFLAGS(uEFlags); \
5292 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5293 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
5294 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5295 } IEM_MC_NATIVE_ELSE() { \
5296 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5297 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5298 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5299 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5300 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5301 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5302 } IEM_MC_NATIVE_ENDIF(); \
5303 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5304 IEM_MC_END(); \
5305 break; \
5306 \
5307 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5308 } \
5309 } \
5310 else \
5311 { \
5312 /* \
5313 * Memory target. \
5314 */ \
5315 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5316 { \
5317 switch (pVCpu->iem.s.enmEffOpSize) \
5318 { \
5319 case IEMMODE_16BIT: \
5320 IEM_MC_BEGIN(0, 0); \
5321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5323 \
5324 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5325 IEMOP_HLP_DONE_DECODING(); \
5326 \
5327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5328 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5329 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5330 \
5331 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5332 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5333 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5334 \
5335 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5336 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5338 IEM_MC_END(); \
5339 break; \
5340 \
5341 case IEMMODE_32BIT: \
5342 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5345 \
5346 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5347 IEMOP_HLP_DONE_DECODING(); \
5348 \
5349 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5350 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5351 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5352 \
5353 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5354 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5355 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5356 \
5357 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5358 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5359 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5360 IEM_MC_END(); \
5361 break; \
5362 \
5363 case IEMMODE_64BIT: \
5364 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5367 \
5368 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5369 IEMOP_HLP_DONE_DECODING(); \
5370 \
5371 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5372 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5373 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5374 \
5375 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5376 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5377 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5378 \
5379 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5380 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5381 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5382 IEM_MC_END(); \
5383 break; \
5384 \
5385 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5386 } \
5387 } \
5388 else \
5389 { \
5390 switch (pVCpu->iem.s.enmEffOpSize) \
5391 { \
5392 case IEMMODE_16BIT: \
5393 IEM_MC_BEGIN(0, 0); \
5394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5396 \
5397 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5398 IEMOP_HLP_DONE_DECODING(); \
5399 \
5400 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5401 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
5402 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5403 \
5404 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5405 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5406 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
5407 \
5408 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5409 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5410 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5411 IEM_MC_END(); \
5412 break; \
5413 \
5414 case IEMMODE_32BIT: \
5415 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5418 \
5419 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5420 IEMOP_HLP_DONE_DECODING(); \
5421 \
5422 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5423 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
5424 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5425 \
5426 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5427 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5428 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
5429 \
5430 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5431 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5432 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5433 IEM_MC_END(); \
5434 break; \
5435 \
5436 case IEMMODE_64BIT: \
5437 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5440 \
5441 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5442 IEMOP_HLP_DONE_DECODING(); \
5443 \
5444 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5445 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
5446 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5447 \
5448 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5449 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5450 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
5451 \
5452 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5453 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5454 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5455 IEM_MC_END(); \
5456 break; \
5457 \
5458 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5459 } \
5460 } \
5461 } \
5462 (void)0
5463
5464/* read-only variant - used by CMP Ev,Ib, which only reads its operands; a LOCK prefix is rejected rather than mapped to an atomic access */
5465#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_InsNm, a_fNativeArchs) \
5466 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5467 { \
5468 /* \
5469 * Register target \
5470 */ \
5471 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
5472 switch (pVCpu->iem.s.enmEffOpSize) \
5473 { \
5474 case IEMMODE_16BIT: \
5475 IEM_MC_BEGIN(0, 0); \
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5477 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5478 IEM_MC_LOCAL(uint16_t, u16Dst); \
5479 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5480 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5481 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5482 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5483 } IEM_MC_NATIVE_ELSE() { \
5484 IEM_MC_ARG(uint16_t const *,pu16Dst, 1); \
5485 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5486 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5487 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5488 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5489 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5490 } IEM_MC_NATIVE_ENDIF(); \
5491 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5492 IEM_MC_END(); \
5493 break; \
5494 \
5495 case IEMMODE_32BIT: \
5496 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5498 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5499 IEM_MC_LOCAL(uint32_t, u32Dst); \
5500 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5501 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5502 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5503 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5504 } IEM_MC_NATIVE_ELSE() { \
5505 IEM_MC_ARG(uint32_t const *,pu32Dst, 1); \
5506 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5507 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5508 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5509 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5510 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5511 } IEM_MC_NATIVE_ENDIF(); \
5512 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5513 IEM_MC_END(); \
5514 break; \
5515 \
5516 case IEMMODE_64BIT: \
5517 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5519 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5520 IEM_MC_LOCAL(uint64_t, u64Dst); \
5521 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5522 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5523 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5524 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5525 } IEM_MC_NATIVE_ELSE() { \
5526 IEM_MC_ARG(uint64_t const *,pu64Dst, 1); \
5527 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5528 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5529 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5530 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5531 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5532 } IEM_MC_NATIVE_ENDIF(); \
5533 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5534 IEM_MC_END(); \
5535 break; \
5536 \
5537 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5538 } \
5539 } \
5540 else \
5541 { \
5542 /* \
5543 * Memory target. \
5544 */ \
5545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5546 { \
5547 switch (pVCpu->iem.s.enmEffOpSize) \
5548 { \
5549 case IEMMODE_16BIT: \
5550 IEM_MC_BEGIN(0, 0); \
5551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5553 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5554 IEMOP_HLP_DONE_DECODING(); \
5555 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5556 IEM_MC_LOCAL(uint16_t, u16Dst); \
5557 IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5558 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5559 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5560 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5561 } IEM_MC_NATIVE_ELSE() { \
5562 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5563 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
5564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5565 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5566 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
5567 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
5568 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5569 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5570 } IEM_MC_NATIVE_ENDIF(); \
5571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5572 IEM_MC_END(); \
5573 break; \
5574 \
5575 case IEMMODE_32BIT: \
5576 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5579 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5580 IEMOP_HLP_DONE_DECODING(); \
5581 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5582 IEM_MC_LOCAL(uint32_t, u32Dst); \
5583 IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5584 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5585 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5586 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5587 } IEM_MC_NATIVE_ELSE() { \
5588 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5589 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
5590 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5591 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5592 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
5593 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
5594 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5595 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5596 } IEM_MC_NATIVE_ENDIF(); \
5597 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5598 IEM_MC_END(); \
5599 break; \
5600 \
5601 case IEMMODE_64BIT: \
5602 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5605 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5606 IEMOP_HLP_DONE_DECODING(); \
5607 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5608 IEM_MC_LOCAL(uint64_t, u64Dst); \
5609 IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5610 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5611 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5612 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5613 } IEM_MC_NATIVE_ELSE() { \
5614 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5615 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
5616 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5617 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
5618 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
5619 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
5620 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5621 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
5622 } IEM_MC_NATIVE_ENDIF(); \
5623 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5624 IEM_MC_END(); \
5625 break; \
5626 \
5627 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5628 } \
5629 } \
5630 else \
5631 { \
5632 IEMOP_HLP_DONE_DECODING(); \
5633 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
5634 } \
5635 } \
5636 (void)0
5637
5638/**
5639 * @opmaps grp1_83
5640 * @opcode /0
5641 * @opflclass arithmetic
5642 */
5643FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5644{
5645 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5646 IEMOP_BODY_BINARY_Ev_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5647}
5648
5649
5650/**
5651 * @opmaps grp1_83
5652 * @opcode /1
5653 * @opflclass logical
5654 */
5655FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5656{
5657 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5658 IEMOP_BODY_BINARY_Ev_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5659}
5660
5661
5662/**
5663 * @opmaps grp1_83
5664 * @opcode /2
5665 * @opflclass arithmetic_carry
5666 */
5667FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5668{
5669 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5670 IEMOP_BODY_BINARY_Ev_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5671}
5672
5673
5674/**
5675 * @opmaps grp1_83
5676 * @opcode /3
5677 * @opflclass arithmetic_carry
5678 */
5679FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5680{
5681 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5682 IEMOP_BODY_BINARY_Ev_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5683}
5684
5685
5686/**
5687 * @opmaps grp1_83
5688 * @opcode /4
5689 * @opflclass logical
5690 */
5691FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5692{
5693 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5694 IEMOP_BODY_BINARY_Ev_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5695}
5696
5697
5698/**
5699 * @opmaps grp1_83
5700 * @opcode /5
5701 * @opflclass arithmetic
5702 */
5703FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5704{
5705 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5706 IEMOP_BODY_BINARY_Ev_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5707}
5708
5709
5710/**
5711 * @opmaps grp1_83
5712 * @opcode /6
5713 * @opflclass logical
5714 */
5715FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5716{
5717 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5718 IEMOP_BODY_BINARY_Ev_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5719}
5720
5721
5722/**
5723 * @opmaps grp1_83
5724 * @opcode /7
5725 * @opflclass arithmetic
5726 */
5727FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5728{
5729 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5730 IEMOP_BODY_BINARY_Ev_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5731}
5732
5733
5734/**
5735 * @opcode 0x83
5736 */
5737FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5738{
5739 /* Note! The OR, AND and XOR forms seem to be present on CPUs prior to
5740 the 386, even though they are absent from the Intel reference manuals
5741 and some 3rd party opcode listings. */
5742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5743 switch (IEM_GET_MODRM_REG_8(bRm))
5744 {
5745 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5746 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5747 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5748 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5749 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5750 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5751 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5752 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5754 }
5755}
5756
5757
5758/**
5759 * @opcode 0x84
5760 * @opflclass logical
5761 */
5762FNIEMOP_DEF(iemOp_test_Eb_Gb)
5763{
5764 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5766
5767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5768
5769 /*
5770 * Deal with the special case of 'test rN, rN', which is frequently used to check a register for zero/non-zero.
5771 * This block only makes a difference when emitting native code, where we'll save a register fetch.
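 * A typical encoding is 84 C0 ('test al,al'), which sets ZF/SF/PF from AL and clears CF/OF.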
5772 */
5773 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5774 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5775 {
5776 IEM_MC_BEGIN(0, 0);
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 IEM_MC_ARG(uint8_t, u8Src, 2);
5779 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5780 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5781 IEM_MC_LOCAL_EFLAGS(uEFlags);
5782 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
5783 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5784 } IEM_MC_NATIVE_ELSE() {
5785 IEM_MC_ARG(uint8_t *, pu8Dst, 1);
5786 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5787 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5788 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u8, fEFlagsIn, pu8Dst, u8Src);
5789 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5790 } IEM_MC_NATIVE_ENDIF();
5791 IEM_MC_ADVANCE_RIP_AND_FINISH();
5792 IEM_MC_END();
5793 }
5794
5795 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5796}
5797
5798
5799/**
5800 * @opcode 0x85
5801 * @opflclass logical
5802 */
5803FNIEMOP_DEF(iemOp_test_Ev_Gv)
5804{
5805 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5806 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5807
5808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5809
5810 /*
5811 * Deal with the special case of 'test rN, rN', which is frequently used to check a register for zero/non-zero.
5812 * This block only makes a difference when emitting native code, where we'll save a register fetch.
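 * A typical encoding is 85 C0 ('test eax,eax'), which sets ZF/SF/PF from EAX and clears CF/OF.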
5813 */
5814 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5815 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5816 {
5817 switch (pVCpu->iem.s.enmEffOpSize)
5818 {
5819 case IEMMODE_16BIT:
5820 IEM_MC_BEGIN(0, 0);
5821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5822 IEM_MC_ARG(uint16_t, u16Src, 2);
5823 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5824 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5825 IEM_MC_LOCAL_EFLAGS(uEFlags);
5826 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
5827 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5828 } IEM_MC_NATIVE_ELSE() {
5829 IEM_MC_ARG(uint16_t *, pu16Dst, 1);
5830 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5831 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5832 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u16, fEFlagsIn, pu16Dst, u16Src);
5833 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5834 } IEM_MC_NATIVE_ENDIF();
5835 IEM_MC_ADVANCE_RIP_AND_FINISH();
5836 IEM_MC_END();
5837 break;
5838
5839 case IEMMODE_32BIT:
5840 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_ARG(uint32_t, u32Src, 2);
5843 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5844 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5845 IEM_MC_LOCAL_EFLAGS(uEFlags);
5846 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
5847 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5848 } IEM_MC_NATIVE_ELSE() {
5849 IEM_MC_ARG(uint32_t *, pu32Dst, 1);
5850 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5851 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5852 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u32, fEFlagsIn, pu32Dst, u32Src);
5853 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5854 } IEM_MC_NATIVE_ENDIF();
5855 IEM_MC_ADVANCE_RIP_AND_FINISH();
5856 IEM_MC_END();
5857 break;
5858
5859 case IEMMODE_64BIT:
5860 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5862 IEM_MC_ARG(uint64_t, u64Src, 2);
5863 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5864 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5865 IEM_MC_LOCAL_EFLAGS(uEFlags);
5866 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
5867 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5868 } IEM_MC_NATIVE_ELSE() {
5869 IEM_MC_ARG(uint64_t *, pu64Dst, 1);
5870 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5871 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5872 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u64, fEFlagsIn, pu64Dst, u64Src);
5873 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5874 } IEM_MC_NATIVE_ENDIF();
5875 IEM_MC_ADVANCE_RIP_AND_FINISH();
5876 IEM_MC_END();
5877 break;
5878
5879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5880 }
5881 }
5882
5883 IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5884}
5885
5886
5887/**
5888 * @opcode 0x86
5889 */
5890FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5891{
5892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5893 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5894
5895 /*
5896 * If rm is denoting a register, no more instruction bytes.
5897 */
5898 if (IEM_IS_MODRM_REG_MODE(bRm))
5899 {
5900 IEM_MC_BEGIN(0, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_LOCAL(uint8_t, uTmp1);
5903 IEM_MC_LOCAL(uint8_t, uTmp2);
5904
5905 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5906 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5907 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5908 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5909
5910 IEM_MC_ADVANCE_RIP_AND_FINISH();
5911 IEM_MC_END();
5912 }
5913 else
5914 {
5915 /*
5916 * We're accessing memory.
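 * Note: XCHG with a memory operand implicitly asserts LOCK#, so the atomic
 * worker is used even without a LOCK prefix, unless locking is explicitly
 * disregarded (IEM_F_X86_DISREGARD_LOCK).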
5917 */
5918#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5919 IEM_MC_BEGIN(0, 0); \
5920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5921 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5922 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5923 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5924 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5925 \
5926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5927 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5928 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5929 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5930 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5931 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5932 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5933 \
5934 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5935 IEM_MC_END()
5936
5937 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5938 {
5939 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5940 }
5941 else
5942 {
5943 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5944 }
5945 }
5946}
5947
5948
5949/**
5950 * @opcode 0x87
5951 */
5952FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5953{
5954 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5956
5957 /*
5958 * If rm is denoting a register, no more instruction bytes.
5959 */
5960 if (IEM_IS_MODRM_REG_MODE(bRm))
5961 {
5962 switch (pVCpu->iem.s.enmEffOpSize)
5963 {
5964 case IEMMODE_16BIT:
5965 IEM_MC_BEGIN(0, 0);
5966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5967 IEM_MC_LOCAL(uint16_t, uTmp1);
5968 IEM_MC_LOCAL(uint16_t, uTmp2);
5969
5970 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5971 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5972 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5973 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5974
5975 IEM_MC_ADVANCE_RIP_AND_FINISH();
5976 IEM_MC_END();
5977 break;
5978
5979 case IEMMODE_32BIT:
5980 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 IEM_MC_LOCAL(uint32_t, uTmp1);
5983 IEM_MC_LOCAL(uint32_t, uTmp2);
5984
5985 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5986 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5987 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5988 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5989
5990 IEM_MC_ADVANCE_RIP_AND_FINISH();
5991 IEM_MC_END();
5992 break;
5993
5994 case IEMMODE_64BIT:
5995 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5997 IEM_MC_LOCAL(uint64_t, uTmp1);
5998 IEM_MC_LOCAL(uint64_t, uTmp2);
5999
6000 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
6001 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
6002 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
6003 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
6004
6005 IEM_MC_ADVANCE_RIP_AND_FINISH();
6006 IEM_MC_END();
6007 break;
6008
6009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6010 }
6011 }
6012 else
6013 {
6014 /*
6015 * We're accessing memory.
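 * As with opcode 0x86, the exchange is implicitly locked for memory
 * operands, LOCK prefix or not.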
6016 */
6017#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
6018 do { \
6019 switch (pVCpu->iem.s.enmEffOpSize) \
6020 { \
6021 case IEMMODE_16BIT: \
6022 IEM_MC_BEGIN(0, 0); \
6023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6024 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6025 IEM_MC_LOCAL(uint16_t, uTmpReg); \
6026 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
6027 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
6028 \
6029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6030 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
6031 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6032 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6033 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
6034 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6035 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6036 \
6037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6038 IEM_MC_END(); \
6039 break; \
6040 \
6041 case IEMMODE_32BIT: \
6042 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
6043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6044 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6045 IEM_MC_LOCAL(uint32_t, uTmpReg); \
6046 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
6047 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
6048 \
6049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6050 IEMOP_HLP_DONE_DECODING(); \
6051 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6052 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6053 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
6054 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6055 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6056 \
6057 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6058 IEM_MC_END(); \
6059 break; \
6060 \
6061 case IEMMODE_64BIT: \
6062 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
6063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6064 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6065 IEM_MC_LOCAL(uint64_t, uTmpReg); \
6066 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
6067 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
6068 \
6069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6070 IEMOP_HLP_DONE_DECODING(); \
6071 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6072 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6073 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
6074 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6075 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6076 \
6077 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6078 IEM_MC_END(); \
6079 break; \
6080 \
6081 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6082 } \
6083 } while (0)
6084 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
6085 {
6086 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
6087 }
6088 else
6089 {
6090 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
6091 }
6092 }
6093}
6094
6095
6096/**
6097 * @opcode 0x88
6098 */
6099FNIEMOP_DEF(iemOp_mov_Eb_Gb)
6100{
6101 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
6102
6103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6105
6106 /*
6107 * If rm is denoting a register, no more instruction bytes.
6108 */
6109 if (IEM_IS_MODRM_REG_MODE(bRm))
6110 {
6111 IEM_MC_BEGIN(0, 0);
6112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6113 IEM_MC_LOCAL(uint8_t, u8Value);
6114 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6115 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
6116 IEM_MC_ADVANCE_RIP_AND_FINISH();
6117 IEM_MC_END();
6118 }
6119 else
6120 {
6121 /*
6122 * We're writing a register to memory.
6123 */
6124 IEM_MC_BEGIN(0, 0);
6125 IEM_MC_LOCAL(uint8_t, u8Value);
6126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6129 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6130 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
6131 IEM_MC_ADVANCE_RIP_AND_FINISH();
6132 IEM_MC_END();
6133 }
6134}
6135
6136
6137/**
6138 * @opcode 0x89
6139 */
6140FNIEMOP_DEF(iemOp_mov_Ev_Gv)
6141{
6142 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
6143
6144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6145
6146 /*
6147 * If rm is denoting a register, no more instruction bytes.
6148 */
6149 if (IEM_IS_MODRM_REG_MODE(bRm))
6150 {
6151 switch (pVCpu->iem.s.enmEffOpSize)
6152 {
6153 case IEMMODE_16BIT:
6154 IEM_MC_BEGIN(0, 0);
6155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6156 IEM_MC_LOCAL(uint16_t, u16Value);
6157 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6158 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6159 IEM_MC_ADVANCE_RIP_AND_FINISH();
6160 IEM_MC_END();
6161 break;
6162
6163 case IEMMODE_32BIT:
6164 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 IEM_MC_LOCAL(uint32_t, u32Value);
6167 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6168 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6169 IEM_MC_ADVANCE_RIP_AND_FINISH();
6170 IEM_MC_END();
6171 break;
6172
6173 case IEMMODE_64BIT:
6174 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6176 IEM_MC_LOCAL(uint64_t, u64Value);
6177 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6178 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6179 IEM_MC_ADVANCE_RIP_AND_FINISH();
6180 IEM_MC_END();
6181 break;
6182
6183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6184 }
6185 }
6186 else
6187 {
6188 /*
6189 * We're writing a register to memory.
6190 */
6191 switch (pVCpu->iem.s.enmEffOpSize)
6192 {
6193 case IEMMODE_16BIT:
6194 IEM_MC_BEGIN(0, 0);
6195 IEM_MC_LOCAL(uint16_t, u16Value);
6196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6199 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6200 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6201 IEM_MC_ADVANCE_RIP_AND_FINISH();
6202 IEM_MC_END();
6203 break;
6204
6205 case IEMMODE_32BIT:
6206 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6207 IEM_MC_LOCAL(uint32_t, u32Value);
6208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6212 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6213 IEM_MC_ADVANCE_RIP_AND_FINISH();
6214 IEM_MC_END();
6215 break;
6216
6217 case IEMMODE_64BIT:
6218 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6219 IEM_MC_LOCAL(uint64_t, u64Value);
6220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6223 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6224 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 break;
6228
6229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6230 }
6231 }
6232}
6233
6234
6235/**
6236 * @opcode 0x8a
6237 */
6238FNIEMOP_DEF(iemOp_mov_Gb_Eb)
6239{
6240 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
6241
6242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6243
6244 /*
6245 * If rm is denoting a register, no more instruction bytes.
6246 */
6247 if (IEM_IS_MODRM_REG_MODE(bRm))
6248 {
6249 IEM_MC_BEGIN(0, 0);
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251 IEM_MC_LOCAL(uint8_t, u8Value);
6252 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6253 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6254 IEM_MC_ADVANCE_RIP_AND_FINISH();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /*
6260 * We're loading a register from memory.
6261 */
6262 IEM_MC_BEGIN(0, 0);
6263 IEM_MC_LOCAL(uint8_t, u8Value);
6264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6267 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6268 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6269 IEM_MC_ADVANCE_RIP_AND_FINISH();
6270 IEM_MC_END();
6271 }
6272}
6273
6274
6275/**
6276 * @opcode 0x8b
6277 */
6278FNIEMOP_DEF(iemOp_mov_Gv_Ev)
6279{
6280 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
6281
6282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6283
6284 /*
6285 * If rm is denoting a register, no more instruction bytes.
6286 */
6287 if (IEM_IS_MODRM_REG_MODE(bRm))
6288 {
6289 switch (pVCpu->iem.s.enmEffOpSize)
6290 {
6291 case IEMMODE_16BIT:
6292 IEM_MC_BEGIN(0, 0);
6293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6294 IEM_MC_LOCAL(uint16_t, u16Value);
6295 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6296 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6297 IEM_MC_ADVANCE_RIP_AND_FINISH();
6298 IEM_MC_END();
6299 break;
6300
6301 case IEMMODE_32BIT:
6302 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6304 IEM_MC_LOCAL(uint32_t, u32Value);
6305 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6306 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6307 IEM_MC_ADVANCE_RIP_AND_FINISH();
6308 IEM_MC_END();
6309 break;
6310
6311 case IEMMODE_64BIT:
6312 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6314 IEM_MC_LOCAL(uint64_t, u64Value);
6315 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6316 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6317 IEM_MC_ADVANCE_RIP_AND_FINISH();
6318 IEM_MC_END();
6319 break;
6320
6321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6322 }
6323 }
6324 else
6325 {
6326 /*
6327 * We're loading a register from memory.
6328 */
6329 switch (pVCpu->iem.s.enmEffOpSize)
6330 {
6331 case IEMMODE_16BIT:
6332 IEM_MC_BEGIN(0, 0);
6333 IEM_MC_LOCAL(uint16_t, u16Value);
6334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6337 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6338 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6339 IEM_MC_ADVANCE_RIP_AND_FINISH();
6340 IEM_MC_END();
6341 break;
6342
6343 case IEMMODE_32BIT:
6344 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6345 IEM_MC_LOCAL(uint32_t, u32Value);
6346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6349 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6350 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6351 IEM_MC_ADVANCE_RIP_AND_FINISH();
6352 IEM_MC_END();
6353 break;
6354
6355 case IEMMODE_64BIT:
6356 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6357 IEM_MC_LOCAL(uint64_t, u64Value);
6358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6361 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6362 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6363 IEM_MC_ADVANCE_RIP_AND_FINISH();
6364 IEM_MC_END();
6365 break;
6366
6367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6368 }
6369 }
6370}
6371
6372
6373/**
6374 * opcode 0x63
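 * ARPL Ew,Gw outside 64-bit mode; in 64-bit mode this byte encodes MOVSXD
 * instead (ARPL is not available there).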
6375 * @todo Table fixme
6376 */
6377FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6378{
6379 if (!IEM_IS_64BIT_CODE(pVCpu))
6380 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6381 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6382 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6383 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6384}
6385
6386
6387/**
6388 * @opcode 0x8c
6389 */
6390FNIEMOP_DEF(iemOp_mov_Ev_Sw)
6391{
6392 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
6393
6394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6395
6396 /*
6397 * Check that the specified segment register exists. The REX.R prefix is ignored.
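 * Only reg values 0..5 (ES, CS, SS, DS, FS, GS) are valid; 6 and 7 raise #UD.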
6398 */
6399 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6400 if (iSegReg > X86_SREG_GS)
6401 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6402
6403 /*
6404 * If rm is denoting a register, no more instruction bytes.
6405 * In that case, the operand size is respected and the upper bits are
6406 * cleared (starting with some Pentium models).
6407 */
6408 if (IEM_IS_MODRM_REG_MODE(bRm))
6409 {
6410 switch (pVCpu->iem.s.enmEffOpSize)
6411 {
6412 case IEMMODE_16BIT:
6413 IEM_MC_BEGIN(0, 0);
6414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6415 IEM_MC_LOCAL(uint16_t, u16Value);
6416 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6417 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6418 IEM_MC_ADVANCE_RIP_AND_FINISH();
6419 IEM_MC_END();
6420 break;
6421
6422 case IEMMODE_32BIT:
6423 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_LOCAL(uint32_t, u32Value);
6426 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
6427 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 break;
6431
6432 case IEMMODE_64BIT:
6433 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6435 IEM_MC_LOCAL(uint64_t, u64Value);
6436 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
6437 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6438 IEM_MC_ADVANCE_RIP_AND_FINISH();
6439 IEM_MC_END();
6440 break;
6441
6442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6443 }
6444 }
6445 else
6446 {
6447 /*
6448 * We're saving the register to memory. The access is word sized
6449 * regardless of operand size prefixes.
6450 */
6451#if 0 /* not necessary */
6452 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6453#endif
6454 IEM_MC_BEGIN(0, 0);
6455 IEM_MC_LOCAL(uint16_t, u16Value);
6456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6459 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6460 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6461 IEM_MC_ADVANCE_RIP_AND_FINISH();
6462 IEM_MC_END();
6463 }
6464}
6465
6466
6467
6468
6469/**
6470 * @opcode 0x8d
6471 */
6472FNIEMOP_DEF(iemOp_lea_Gv_M)
6473{
6474 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
6475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6476 if (IEM_IS_MODRM_REG_MODE(bRm))
6477 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
6478
6479 switch (pVCpu->iem.s.enmEffOpSize)
6480 {
6481 case IEMMODE_16BIT:
6482 IEM_MC_BEGIN(0, 0);
6483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6487 * operand-size, which is usually the case. It'll save an instruction
6488 * and a register. */
6489 IEM_MC_LOCAL(uint16_t, u16Cast);
6490 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
6491 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
6492 IEM_MC_ADVANCE_RIP_AND_FINISH();
6493 IEM_MC_END();
6494 break;
6495
6496 case IEMMODE_32BIT:
6497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6501 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6502 * operand-size, which is usually the case. It'll save an instruction
6503 * and a register. */
6504 IEM_MC_LOCAL(uint32_t, u32Cast);
6505 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
6506 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
6507 IEM_MC_ADVANCE_RIP_AND_FINISH();
6508 IEM_MC_END();
6509 break;
6510
6511 case IEMMODE_64BIT:
6512 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6516 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
6517 IEM_MC_ADVANCE_RIP_AND_FINISH();
6518 IEM_MC_END();
6519 break;
6520
6521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6522 }
6523}
6524
6525
6526/**
6527 * @opcode 0x8e
6528 */
6529FNIEMOP_DEF(iemOp_mov_Sw_Ev)
6530{
6531 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
6532
6533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6534
6535 /*
6536 * The practical operand size is 16-bit.
6537 */
6538#if 0 /* not necessary */
6539 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6540#endif
6541
6542 /*
6543 * Check that the destination register exists and can be used with this
6544 * instruction. The REX.R prefix is ignored.
6545 */
6546 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6547 /** @todo r=bird: What does 8086 do here wrt CS? */
6548 if ( iSegReg == X86_SREG_CS
6549 || iSegReg > X86_SREG_GS)
6550 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6551
6552 /*
6553 * If rm is denoting a register, no more instruction bytes.
6554 *
6555 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
6556 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
6557 * register. This is a restriction of the current recompiler
6558 * approach.
6559 */
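    /* Summary of the flag selection below (a sketch derived from the code):
       loading SS always gets IEM_CIMPL_F_INHIBIT_SHADOW (interrupt shadow);
       in 32-bit code, loads of SS, DS and ES additionally get
       IEM_CIMPL_F_MODE so the execution mode is re-evaluated afterwards. */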
6560 if (IEM_IS_MODRM_REG_MODE(bRm))
6561 {
6562#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
6563 IEM_MC_BEGIN(0, a_fCImplFlags); \
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6565 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6566 IEM_MC_ARG(uint16_t, u16Value, 1); \
6567 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6568 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6569 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6570 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6571 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6572 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6573 iemCImpl_load_SReg, iSRegArg, u16Value); \
6574 IEM_MC_END()
6575
6576 if (iSegReg == X86_SREG_SS)
6577 {
6578 if (IEM_IS_32BIT_CODE(pVCpu))
6579 {
6580 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6581 }
6582 else
6583 {
6584 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6585 }
6586 }
6587 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6588 {
6589 IEMOP_MOV_SW_EV_REG_BODY(0);
6590 }
6591 else
6592 {
6593 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
6594 }
6595#undef IEMOP_MOV_SW_EV_REG_BODY
6596 }
6597 else
6598 {
6599 /*
6600 * We're loading the register from memory. The access is word sized
6601 * regardless of operand size prefixes.
6602 */
6603#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
6604 IEM_MC_BEGIN(0, a_fCImplFlags); \
6605 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6606 IEM_MC_ARG(uint16_t, u16Value, 1); \
6607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6610 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6611 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6612 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6613 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6614 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6615 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6616 iemCImpl_load_SReg, iSRegArg, u16Value); \
6617 IEM_MC_END()
6618
6619 if (iSegReg == X86_SREG_SS)
6620 {
6621 if (IEM_IS_32BIT_CODE(pVCpu))
6622 {
6623 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6624 }
6625 else
6626 {
6627 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6628 }
6629 }
6630 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6631 {
6632 IEMOP_MOV_SW_EV_MEM_BODY(0);
6633 }
6634 else
6635 {
6636 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
6637 }
6638#undef IEMOP_MOV_SW_EV_MEM_BODY
6639 }
6640}
6641
6642
6643/** Opcode 0x8f /0. */
6644FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
6645{
6646 /* This bugger is rather annoying as it requires rSP to be updated before
6647 doing the effective address calculations. Will eventually require a
6648 split between the R/M+SIB decoding and the effective address
6649 calculation - which is something that is required for any attempt at
6650 reusing this code for a recompiler. It may also be good to have if we
6651 need to delay the #UD exception caused by invalid lock prefixes.
6652
6653 For now, we'll do a mostly safe interpreter-only implementation here. */
6654 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
6655 * now until tests show it's checked. */
6656 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
6657
6658 /* Register access is relatively easy and can share code. */
6659 if (IEM_IS_MODRM_REG_MODE(bRm))
6660 return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
6661
6662 /*
6663 * Memory target.
6664 *
6665 * Intel says that RSP is incremented before it's used in any effective
6666 * address calculations. This means some serious extra annoyance here since
6667 * we decode and calculate the effective address in one step and like to
6668 * delay committing registers till everything is done.
6669 *
6670 * So, we'll decode and calculate the effective address twice. This will
6671 * require some recoding if turned into a recompiler.
6672 */
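    /* Worked example of the Intel rule above (sketch, assuming 64-bit mode):
       with RSP=0x1000, 'pop qword [rsp]' loads the value at 0x1000, increments
       RSP to 0x1008, and only then evaluates the destination address, so the
       store goes to 0x1008. */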
6673 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
6674
6675#if 1 /* This can be compiled, optimize later if needed. */
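    /* Note: the (2/4/8 << 8) argument to IEM_MC_CALC_RM_EFF_ADDR below is
       presumably the extra RSP displacement carried in the second byte of
       that parameter, making the address calculation behave as if RSP had
       already been incremented by the operand size. */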
6676 switch (pVCpu->iem.s.enmEffOpSize)
6677 {
6678 case IEMMODE_16BIT:
6679 IEM_MC_BEGIN(0, 0);
6680 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
6682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6683 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6684 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
6685 IEM_MC_END();
6686 break;
6687
6688 case IEMMODE_32BIT:
6689 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6690 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
6692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6693 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6694 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
6695 IEM_MC_END();
6696 break;
6697
6698 case IEMMODE_64BIT:
6699 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6700 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
6702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6703 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6704 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
6705 IEM_MC_END();
6706 break;
6707
6708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6709 }
6710
6711#else
6712# ifndef TST_IEM_CHECK_MC
6713 /* Calc effective address with modified ESP. */
6714/** @todo testcase */
6715 RTGCPTR GCPtrEff;
6716 VBOXSTRICTRC rcStrict;
6717 switch (pVCpu->iem.s.enmEffOpSize)
6718 {
6719 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
6720 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
6721 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
6722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6723 }
6724 if (rcStrict != VINF_SUCCESS)
6725 return rcStrict;
6726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6727
6728 /* Perform the operation - this should be CImpl. */
6729 RTUINT64U TmpRsp;
6730 TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
6731 switch (pVCpu->iem.s.enmEffOpSize)
6732 {
6733 case IEMMODE_16BIT:
6734 {
6735 uint16_t u16Value;
6736 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
6737 if (rcStrict == VINF_SUCCESS)
6738 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
6739 break;
6740 }
6741
6742 case IEMMODE_32BIT:
6743 {
6744 uint32_t u32Value;
6745 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
6746 if (rcStrict == VINF_SUCCESS)
6747 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
6748 break;
6749 }
6750
6751 case IEMMODE_64BIT:
6752 {
6753 uint64_t u64Value;
6754 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
6755 if (rcStrict == VINF_SUCCESS)
6756 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
6757 break;
6758 }
6759
6760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6761 }
6762 if (rcStrict == VINF_SUCCESS)
6763 {
6764 pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
6765 return iemRegUpdateRipAndFinishClearingRF(pVCpu);
6766 }
6767 return rcStrict;
6768
6769# else
6770 return VERR_IEM_IPE_2;
6771# endif
6772#endif
6773}
6774
6775
6776/**
6777 * @opcode 0x8f
6778 */
6779FNIEMOP_DEF(iemOp_Grp1A__xop)
6780{
6781 /*
6782 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6783 * three byte VEX prefix, except that the mmmmm field cannot have the values
6784 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6785 */
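    /* Layout sketch as decoded below (derived from this code rather than the
       AMD docs, so treat it as illustrative):
          byte 0: 0x8f
          byte 1: RXBmmmmm - inverted REX-like R/X/B bits and the map select
                  (only 8, 9 and 10 are valid).
          byte 2: WvvvvLpp - W (REX.W-like), inverted extra register vvvv,
                  vector length L and implied prefix pp. */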
6786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6787 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6788 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6789
6790 IEMOP_MNEMONIC(xop, "xop");
6791 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6792 {
6793 /** @todo Test when exactly the XOP conformance checks kick in during
6794 * instruction decoding and fetching (using \#PF). */
6795 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6796 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6797 if ( ( pVCpu->iem.s.fPrefixes
6798 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6799 == 0)
6800 {
6801 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6802 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6803 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6804 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6805 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6806 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6807 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6808 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6809 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6810
6811 /** @todo XOP: Just use new tables and decoders. */
6812 switch (bRm & 0x1f)
6813 {
6814 case 8: /* xop opcode map 8. */
6815 IEMOP_BITCH_ABOUT_STUB();
6816 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6817
6818 case 9: /* xop opcode map 9. */
6819 IEMOP_BITCH_ABOUT_STUB();
6820 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6821
6822 case 10: /* xop opcode map 10. */
6823 IEMOP_BITCH_ABOUT_STUB();
6824 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6825
6826 default:
6827 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6828 IEMOP_RAISE_INVALID_OPCODE_RET();
6829 }
6830 }
6831 else
6832 Log(("XOP: Invalid prefix mix!\n"));
6833 }
6834 else
6835 Log(("XOP: XOP support disabled!\n"));
6836 IEMOP_RAISE_INVALID_OPCODE_RET();
6837}
6838
6839
6840/**
6841 * Common 'xchg reg,rAX' helper.
6842 */
6843FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6844{
6845 iReg |= pVCpu->iem.s.uRexB;
6846 switch (pVCpu->iem.s.enmEffOpSize)
6847 {
6848 case IEMMODE_16BIT:
6849 IEM_MC_BEGIN(0, 0);
6850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6851 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6852 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6853 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6854 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6855 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6856 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6857 IEM_MC_ADVANCE_RIP_AND_FINISH();
6858 IEM_MC_END();
6859 break;
6860
6861 case IEMMODE_32BIT:
6862 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6865 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6866 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6867 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6868 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6869 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6870 IEM_MC_ADVANCE_RIP_AND_FINISH();
6871 IEM_MC_END();
6872 break;
6873
6874 case IEMMODE_64BIT:
6875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6877 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6878 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6879 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6880 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6881 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6882 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6883 IEM_MC_ADVANCE_RIP_AND_FINISH();
6884 IEM_MC_END();
6885 break;
6886
6887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6888 }
6889}
6890
6891
6892/**
6893 * @opcode 0x90
6894 */
6895FNIEMOP_DEF(iemOp_nop)
6896{
6897 /* R8/R8D and RAX/EAX can be exchanged. */
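    /* I.e. a plain 0x90 is NOP, but with REX.B (e.g. 41 90) it decodes as
       xchg r8d,eax since the implicit rAX-form register operand gets
       extended. */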
6898 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6899 {
6900 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6901 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6902 }
6903
6904 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6905 {
6906 IEMOP_MNEMONIC(pause, "pause");
6907 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6908 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6909 if (!IEM_IS_IN_GUEST(pVCpu))
6910 { /* probable */ }
6911#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6912 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6913 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6914#endif
6915#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6916 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6917 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6918#endif
6919 }
6920 else
6921 IEMOP_MNEMONIC(nop, "nop");
6922 /** @todo testcase: lock nop; lock pause */
6923 IEM_MC_BEGIN(0, 0);
6924 IEMOP_HLP_DONE_DECODING();
6925 IEM_MC_ADVANCE_RIP_AND_FINISH();
6926 IEM_MC_END();
6927}
6928
6929
6930/**
6931 * @opcode 0x91
6932 */
6933FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6934{
6935 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6936 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6937}
6938
6939
6940/**
6941 * @opcode 0x92
6942 */
6943FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6944{
6945 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6946 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6947}
6948
6949
6950/**
6951 * @opcode 0x93
6952 */
6953FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6954{
6955 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6956 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6957}
6958
6959
6960/**
6961 * @opcode 0x94
6962 */
6963FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6964{
6965 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
6966 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6967}
6968
6969
6970/**
6971 * @opcode 0x95
6972 */
6973FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6974{
6975 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6976 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6977}
6978
6979
6980/**
6981 * @opcode 0x96
6982 */
6983FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6984{
6985 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6986 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6987}
6988
6989
6990/**
6991 * @opcode 0x97
6992 */
6993FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6994{
6995 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6996 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6997}
6998
6999
7000/**
7001 * @opcode 0x98
7002 */
7003FNIEMOP_DEF(iemOp_cbw)
7004{
7005 switch (pVCpu->iem.s.enmEffOpSize)
7006 {
7007 case IEMMODE_16BIT:
7008 IEMOP_MNEMONIC(cbw, "cbw");
7009 IEM_MC_BEGIN(0, 0);
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
7012 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
7013 } IEM_MC_ELSE() {
7014 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
7015 } IEM_MC_ENDIF();
7016 IEM_MC_ADVANCE_RIP_AND_FINISH();
7017 IEM_MC_END();
7018 break;
7019
7020 case IEMMODE_32BIT:
7021 IEMOP_MNEMONIC(cwde, "cwde");
7022 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7024 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
7025 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
7026 } IEM_MC_ELSE() {
7027 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
7028 } IEM_MC_ENDIF();
7029 IEM_MC_ADVANCE_RIP_AND_FINISH();
7030 IEM_MC_END();
7031 break;
7032
7033 case IEMMODE_64BIT:
7034 IEMOP_MNEMONIC(cdqe, "cdqe");
7035 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7037 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
7038 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
7039 } IEM_MC_ELSE() {
7040 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
7041 } IEM_MC_ENDIF();
7042 IEM_MC_ADVANCE_RIP_AND_FINISH();
7043 IEM_MC_END();
7044 break;
7045
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7047 }
7048}
7049
7050
7051/**
7052 * @opcode 0x99
7053 */
7054FNIEMOP_DEF(iemOp_cwd)
7055{
7056 switch (pVCpu->iem.s.enmEffOpSize)
7057 {
7058 case IEMMODE_16BIT:
7059 IEMOP_MNEMONIC(cwd, "cwd");
7060 IEM_MC_BEGIN(0, 0);
7061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7062 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
7063 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
7064 } IEM_MC_ELSE() {
7065 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
7066 } IEM_MC_ENDIF();
7067 IEM_MC_ADVANCE_RIP_AND_FINISH();
7068 IEM_MC_END();
7069 break;
7070
7071 case IEMMODE_32BIT:
7072 IEMOP_MNEMONIC(cdq, "cdq");
7073 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7075 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
7076 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
7077 } IEM_MC_ELSE() {
7078 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
7079 } IEM_MC_ENDIF();
7080 IEM_MC_ADVANCE_RIP_AND_FINISH();
7081 IEM_MC_END();
7082 break;
7083
7084 case IEMMODE_64BIT:
7085 IEMOP_MNEMONIC(cqo, "cqo");
7086 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7088 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
7089 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
7090 } IEM_MC_ELSE() {
7091 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
7092 } IEM_MC_ENDIF();
7093 IEM_MC_ADVANCE_RIP_AND_FINISH();
7094 IEM_MC_END();
7095 break;
7096
7097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7098 }
7099}
7100
7101
7102/**
7103 * @opcode 0x9a
7104 */
7105FNIEMOP_DEF(iemOp_call_Ap)
7106{
7107 IEMOP_MNEMONIC(call_Ap, "call Ap");
7108 IEMOP_HLP_NO_64BIT();
7109
7110 /* Decode the far pointer address and pass it on to the far call C implementation. */
7111 uint32_t off32Seg;
7112 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
7113 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
7114 else
7115 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
7116 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
7119 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
7120 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
7121 /** @todo make task-switches, ring-switches, ++ return non-zero status */
7122}
7123
7124
7125/** Opcode 0x9b. (aka fwait) */
7126FNIEMOP_DEF(iemOp_wait)
7127{
7128 IEMOP_MNEMONIC(wait, "wait");
7129 IEM_MC_BEGIN(0, 0);
7130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7131 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
7132 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7133 IEM_MC_ADVANCE_RIP_AND_FINISH();
7134 IEM_MC_END();
7135}
7136
7137
7138/**
7139 * @opcode 0x9c
7140 */
7141FNIEMOP_DEF(iemOp_pushf_Fv)
7142{
7143 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
7144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7146 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
7147 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
7148}
7149
7150
7151/**
7152 * @opcode 0x9d
7153 */
7154FNIEMOP_DEF(iemOp_popf_Fv)
7155{
7156 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
7157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7158 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7159 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
7160 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
7161 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
7162}
7163
7164
7165/**
7166 * @opcode 0x9e
7167 * @opflmodify cf,pf,af,zf,sf
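 * SAHF replaces the low byte of EFLAGS with AH, keeping only the five
 * status flags and forcing the reserved bit 1; e.g. AH=0xd7 sets SF, ZF,
 * AF, PF and CF (example matching the masking done below).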
7168 */
7169FNIEMOP_DEF(iemOp_sahf)
7170{
7171 IEMOP_MNEMONIC(sahf, "sahf");
7172 if ( IEM_IS_64BIT_CODE(pVCpu)
7173 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7174 IEMOP_RAISE_INVALID_OPCODE_RET();
7175 IEM_MC_BEGIN(0, 0);
7176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7177 IEM_MC_LOCAL(uint32_t, u32Flags);
7178 IEM_MC_LOCAL(uint32_t, EFlags);
7179 IEM_MC_FETCH_EFLAGS(EFlags);
7180 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
7181 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7182 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
7183 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
7184 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
7185 IEM_MC_COMMIT_EFLAGS(EFlags);
7186 IEM_MC_ADVANCE_RIP_AND_FINISH();
7187 IEM_MC_END();
7188}
7189
7190
7191/**
7192 * @opcode 0x9f
7193 * @opfltest cf,pf,af,zf,sf
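 * LAHF is the inverse of SAHF: AH is loaded with the low byte of EFLAGS,
 * i.e. SF:ZF:0:AF:0:PF:1:CF from bit 7 down to bit 0.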
7194 */
7195FNIEMOP_DEF(iemOp_lahf)
7196{
7197 IEMOP_MNEMONIC(lahf, "lahf");
7198 if ( IEM_IS_64BIT_CODE(pVCpu)
7199 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7200 IEMOP_RAISE_INVALID_OPCODE_RET();
7201 IEM_MC_BEGIN(0, 0);
7202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7203 IEM_MC_LOCAL(uint8_t, u8Flags);
7204 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
7205 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
7206 IEM_MC_ADVANCE_RIP_AND_FINISH();
7207 IEM_MC_END();
7208}
7209
7210
7211/**
7212 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
7213 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
7214 * Will return/throw on failures.
7215 * @param a_GCPtrMemOff The variable to store the offset in.
7216 */
7217#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
7218 do \
7219 { \
7220 switch (pVCpu->iem.s.enmEffAddrMode) \
7221 { \
7222 case IEMMODE_16BIT: \
7223 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
7224 break; \
7225 case IEMMODE_32BIT: \
7226 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
7227 break; \
7228 case IEMMODE_64BIT: \
7229 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
7230 break; \
7231 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
7232 } \
7233 } while (0)
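/* Example of what this amounts to (illustrative): in 64-bit code the moffs
   following opcodes 0xa0 thru 0xa3 is a full 8-byte absolute offset, a 0x67
   address-size prefix shrinks it to 4 bytes, and 16-bit addressing uses 2
   bytes; the fetched offset is zero extended to 64 bits in all cases. */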
7234
7235/**
7236 * @opcode 0xa0
7237 */
7238FNIEMOP_DEF(iemOp_mov_AL_Ob)
7239{
7240 /*
7241 * Get the offset.
7242 */
7243 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
7244 RTGCPTR GCPtrMemOffDecode;
7245 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7246
7247 /*
7248 * Fetch AL.
7249 */
7250 IEM_MC_BEGIN(0, 0);
7251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7252 IEM_MC_LOCAL(uint8_t, u8Tmp);
7253 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7254 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7255 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7256 IEM_MC_ADVANCE_RIP_AND_FINISH();
7257 IEM_MC_END();
7258}
7259
7260
7261/**
7262 * @opcode 0xa1
7263 */
7264FNIEMOP_DEF(iemOp_mov_rAX_Ov)
7265{
7266 /*
7267 * Get the offset.
7268 */
7269 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
7270 RTGCPTR GCPtrMemOffDecode;
7271 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7272
7273 /*
7274 * Fetch rAX.
7275 */
7276 switch (pVCpu->iem.s.enmEffOpSize)
7277 {
7278 case IEMMODE_16BIT:
7279 IEM_MC_BEGIN(0, 0);
7280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7281 IEM_MC_LOCAL(uint16_t, u16Tmp);
7282 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7283 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7284 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
7285 IEM_MC_ADVANCE_RIP_AND_FINISH();
7286 IEM_MC_END();
7287 break;
7288
7289 case IEMMODE_32BIT:
7290 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7292 IEM_MC_LOCAL(uint32_t, u32Tmp);
7293 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7294 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7295 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
7296 IEM_MC_ADVANCE_RIP_AND_FINISH();
7297 IEM_MC_END();
7298 break;
7299
7300 case IEMMODE_64BIT:
7301 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7303 IEM_MC_LOCAL(uint64_t, u64Tmp);
7304 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7305 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7306 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
7307 IEM_MC_ADVANCE_RIP_AND_FINISH();
7308 IEM_MC_END();
7309 break;
7310
7311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7312 }
7313}
7314
7315
7316/**
7317 * @opcode 0xa2
7318 */
7319FNIEMOP_DEF(iemOp_mov_Ob_AL)
7320{
7321 /*
7322 * Get the offset.
7323 */
7324 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
7325 RTGCPTR GCPtrMemOffDecode;
7326 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7327
7328 /*
7329 * Store AL.
7330 */
7331 IEM_MC_BEGIN(0, 0);
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 IEM_MC_LOCAL(uint8_t, u8Tmp);
7334 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
7335 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7336 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
7337 IEM_MC_ADVANCE_RIP_AND_FINISH();
7338 IEM_MC_END();
7339}
7340
7341
7342/**
7343 * @opcode 0xa3
7344 */
7345FNIEMOP_DEF(iemOp_mov_Ov_rAX)
7346{
7347 /*
7348 * Get the offset.
7349 */
7350 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
7351 RTGCPTR GCPtrMemOffDecode;
7352 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7353
7354 /*
7355 * Store rAX.
7356 */
7357 switch (pVCpu->iem.s.enmEffOpSize)
7358 {
7359 case IEMMODE_16BIT:
7360 IEM_MC_BEGIN(0, 0);
7361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7362 IEM_MC_LOCAL(uint16_t, u16Tmp);
7363 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
7364 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7365 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
7366 IEM_MC_ADVANCE_RIP_AND_FINISH();
7367 IEM_MC_END();
7368 break;
7369
7370 case IEMMODE_32BIT:
7371 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7373 IEM_MC_LOCAL(uint32_t, u32Tmp);
7374 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
7375 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7376 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
7377 IEM_MC_ADVANCE_RIP_AND_FINISH();
7378 IEM_MC_END();
7379 break;
7380
7381 case IEMMODE_64BIT:
7382 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7384 IEM_MC_LOCAL(uint64_t, u64Tmp);
7385 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
7386 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7387 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
7388 IEM_MC_ADVANCE_RIP_AND_FINISH();
7389 IEM_MC_END();
7390 break;
7391
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394}
7395
7396/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
7397#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
7398 IEM_MC_BEGIN(a_fMcFlags, 0); \
7399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7400 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7401 IEM_MC_LOCAL(RTGCPTR, uAddr); \
7402 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
7403 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
7404 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
7405 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
7406 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7407 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7408 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7409 } IEM_MC_ELSE() { \
7410 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7411 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7412 } IEM_MC_ENDIF(); \
7413 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7414 IEM_MC_END() \
7415
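/* In words (sketch): the macro above fetches an element from the effective
   segment (DS unless overridden) at xSI, stores it to ES:xDI, and then steps
   both index registers by the element size - up when DF is clear, down when
   DF is set. */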
7416/**
7417 * @opcode 0xa4
7418 * @opfltest df
7419 */
7420FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7421{
7422 /*
7423 * Use the C implementation if a repeat prefix is encountered.
7424 */
7425 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7426 {
7427 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7429 switch (pVCpu->iem.s.enmEffAddrMode)
7430 {
7431 case IEMMODE_16BIT:
7432 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7435 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7436 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7437 case IEMMODE_32BIT:
7438 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7439 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7440 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7441 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7442 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7443 case IEMMODE_64BIT:
7444 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7445 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7446 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7448 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7450 }
7451 }
7452
7453 /*
7454 * Sharing case implementation with movs[wdq] below.
7455 */
7456 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7457 switch (pVCpu->iem.s.enmEffAddrMode)
7458 {
7459 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7460 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7461 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7463 }
7464}
7465
7466
7467/**
7468 * @opcode 0xa5
7469 * @opfltest df
7470 */
7471FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7472{
7473
7474 /*
7475 * Use the C implementation if a repeat prefix is encountered.
7476 */
7477 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7478 {
7479 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7481 switch (pVCpu->iem.s.enmEffOpSize)
7482 {
7483 case IEMMODE_16BIT:
7484 switch (pVCpu->iem.s.enmEffAddrMode)
7485 {
7486 case IEMMODE_16BIT:
7487 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7488 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7489 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7491 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7492 case IEMMODE_32BIT:
7493 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7494 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7497 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7498 case IEMMODE_64BIT:
7499 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7503 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7505 }
7506 break;
7507 case IEMMODE_32BIT:
7508 switch (pVCpu->iem.s.enmEffAddrMode)
7509 {
7510 case IEMMODE_16BIT:
7511 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7512 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7513 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7515 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7516 case IEMMODE_32BIT:
7517 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7518 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7519 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7520 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7521 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7522 case IEMMODE_64BIT:
7523 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7524 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7525 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7526 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7527 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7529 }
7530 case IEMMODE_64BIT:
7531 switch (pVCpu->iem.s.enmEffAddrMode)
7532 {
7533 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7534 case IEMMODE_32BIT:
7535 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7539 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7540 case IEMMODE_64BIT:
7541 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7542 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7543 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7545 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7547 }
7548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7549 }
7550 }
7551
7552 /*
7553 * Annoying double switch here.
7554 * Using ugly macro for implementing the cases, sharing it with movsb.
7555 */
7556 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7557 switch (pVCpu->iem.s.enmEffOpSize)
7558 {
7559 case IEMMODE_16BIT:
7560 switch (pVCpu->iem.s.enmEffAddrMode)
7561 {
7562 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7563 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7564 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7566 }
7567 break;
7568
7569 case IEMMODE_32BIT:
7570 switch (pVCpu->iem.s.enmEffAddrMode)
7571 {
7572 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7573 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7574 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7576 }
7577 break;
7578
7579 case IEMMODE_64BIT:
7580 switch (pVCpu->iem.s.enmEffAddrMode)
7581 {
7582 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7583 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7584 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7586 }
7587 break;
7588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7589 }
7590}
7591
7592#undef IEM_MOVS_CASE
7593
7594/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
7595#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
7596 IEM_MC_BEGIN(a_fMcFlags, 0); \
7597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7598 \
7599 IEM_MC_LOCAL(RTGCPTR, uAddr1); \
7600 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
7601 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
7602 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
7603 \
7604 IEM_MC_LOCAL(RTGCPTR, uAddr2); \
7605 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
7606 IEM_MC_ARG(uint##ValBits##_t, uValue2, 2); \
7607 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
7608 \
7609 IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 1); \
7610 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
7611 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puValue1, uValue2); \
7612 \
7613 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
7614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7615 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7616 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7617 } IEM_MC_ELSE() { \
7618 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7619 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7620 } IEM_MC_ENDIF(); \
7621 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7622 IEM_MC_END() \
7623
7624/**
7625 * @opcode 0xa6
7626 * @opflclass arithmetic
7627 * @opfltest df
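 * Note (general x86 behaviour): REPE/REPZ repeats while rCX != 0 and the
 * compare left ZF=1, REPNE/REPNZ while rCX != 0 and ZF=0, which is why the
 * two prefixes get separate C implementations below.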
7628 */
7629FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7630{
7631
7632 /*
7633 * Use the C implementation if a repeat prefix is encountered.
7634 */
7635 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7636 {
7637 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7639 switch (pVCpu->iem.s.enmEffAddrMode)
7640 {
7641 case IEMMODE_16BIT:
7642 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7643 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7644 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7645 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7646 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7647 case IEMMODE_32BIT:
7648 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7651 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7652 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7653 case IEMMODE_64BIT:
7654 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7655 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7656 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7657 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7658 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7660 }
7661 }
7662 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7663 {
7664 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7666 switch (pVCpu->iem.s.enmEffAddrMode)
7667 {
7668 case IEMMODE_16BIT:
7669 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7670 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7671 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7673 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7674 case IEMMODE_32BIT:
7675 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7676 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7677 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7679 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7680 case IEMMODE_64BIT:
7681 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7682 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7683 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7685 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7687 }
7688 }
7689
7690 /*
7691 * Sharing case implementation with cmps[wdq] below.
7692 */
7693 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7694 switch (pVCpu->iem.s.enmEffAddrMode)
7695 {
7696 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7697 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7698 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7700 }
7701}
7702
7703
7704/**
7705 * @opcode 0xa7
7706 * @opflclass arithmetic
7707 * @opfltest df
7708 */
7709FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7710{
7711 /*
7712 * Use the C implementation if a repeat prefix is encountered.
7713 */
7714 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7715 {
7716 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 switch (pVCpu->iem.s.enmEffOpSize)
7719 {
7720 case IEMMODE_16BIT:
7721 switch (pVCpu->iem.s.enmEffAddrMode)
7722 {
7723 case IEMMODE_16BIT:
7724 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7725 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7726 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7727 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7728 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7729 case IEMMODE_32BIT:
7730 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7731 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7732 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7733 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7734 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7735 case IEMMODE_64BIT:
7736 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7737 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7738 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7739 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7740 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7742 }
7743 break;
7744 case IEMMODE_32BIT:
7745 switch (pVCpu->iem.s.enmEffAddrMode)
7746 {
7747 case IEMMODE_16BIT:
7748 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7749 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7751 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7752 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7753 case IEMMODE_32BIT:
7754 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7755 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7756 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7759 case IEMMODE_64BIT:
7760 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7761 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7762 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7763 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7764 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7766 }
7767 case IEMMODE_64BIT:
7768 switch (pVCpu->iem.s.enmEffAddrMode)
7769 {
7770 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7771 case IEMMODE_32BIT:
7772 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7773 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7775 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7776 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7777 case IEMMODE_64BIT:
7778 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7779 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7780 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7781 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7782 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7784 }
7785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7786 }
7787 }
7788
7789 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7790 {
7791 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793 switch (pVCpu->iem.s.enmEffOpSize)
7794 {
7795 case IEMMODE_16BIT:
7796 switch (pVCpu->iem.s.enmEffAddrMode)
7797 {
7798 case IEMMODE_16BIT:
7799 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7800 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7801 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7803 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7804 case IEMMODE_32BIT:
7805 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7806 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7807 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7809 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7810 case IEMMODE_64BIT:
7811 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7815 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7817 }
7818 break;
7819 case IEMMODE_32BIT:
7820 switch (pVCpu->iem.s.enmEffAddrMode)
7821 {
7822 case IEMMODE_16BIT:
7823 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7824 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7825 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7826 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7827 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7828 case IEMMODE_32BIT:
7829 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7830 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7831 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7834 case IEMMODE_64BIT:
7835 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7838 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7839 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7841 }
7842 case IEMMODE_64BIT:
7843 switch (pVCpu->iem.s.enmEffAddrMode)
7844 {
7845 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7846 case IEMMODE_32BIT:
7847 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7848 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7849 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7850 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7851 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7852 case IEMMODE_64BIT:
7853 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7855 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7857 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 }
7863
7864 /*
7865 * Annoying double switch here.
7866 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7867 */
7868 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7869 switch (pVCpu->iem.s.enmEffOpSize)
7870 {
7871 case IEMMODE_16BIT:
7872 switch (pVCpu->iem.s.enmEffAddrMode)
7873 {
7874 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7875 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7876 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7878 }
7879 break;
7880
7881 case IEMMODE_32BIT:
7882 switch (pVCpu->iem.s.enmEffAddrMode)
7883 {
7884 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7885 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7886 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7888 }
7889 break;
7890
7891 case IEMMODE_64BIT:
7892 switch (pVCpu->iem.s.enmEffAddrMode)
7893 {
7894 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7895 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_64BIT); break;
7896 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7898 }
7899 break;
7900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7901 }
7902}
7903
7904#undef IEM_CMPS_CASE
7905
7906/**
7907 * @opcode 0xa8
7908 * @opflclass logical
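 * TEST performs an AND for the flags only; e.g. test al,80h sets SF iff
 * bit 7 of AL is set while AL itself is left unmodified.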
7909 */
7910FNIEMOP_DEF(iemOp_test_AL_Ib)
7911{
7912 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7914 IEMOP_BODY_BINARY_AL_Ib(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7915}
7916
7917
7918/**
7919 * @opcode 0xa9
7920 * @opflclass logical
7921 */
7922FNIEMOP_DEF(iemOp_test_eAX_Iz)
7923{
7924 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7926 IEMOP_BODY_BINARY_rAX_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7927}
7928
7929
7930/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
7931#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
7932 IEM_MC_BEGIN(a_fMcFlags, 0); \
7933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7934 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7935 IEM_MC_LOCAL(RTGCPTR, uAddr); \
7936 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
7937 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
7938 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
7939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7940 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7941 } IEM_MC_ELSE() { \
7942 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7943 } IEM_MC_ENDIF(); \
7944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7945 IEM_MC_END() \
7946
7947/**
7948 * @opcode 0xaa
7949 */
7950FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7951{
7952 /*
7953 * Use the C implementation if a repeat prefix is encountered.
7954 */
7955 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7956 {
7957 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7959 switch (pVCpu->iem.s.enmEffAddrMode)
7960 {
7961 case IEMMODE_16BIT:
7962 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7963 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7964 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7965 iemCImpl_stos_al_m16);
7966 case IEMMODE_32BIT:
7967 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7968 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7969 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7970 iemCImpl_stos_al_m32);
7971 case IEMMODE_64BIT:
7972 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7975 iemCImpl_stos_al_m64);
7976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7977 }
7978 }
7979
7980 /*
7981 * Sharing case implementation with stos[wdq] below.
7982 */
7983 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7984 switch (pVCpu->iem.s.enmEffAddrMode)
7985 {
7986 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7987 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7988 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7990 }
7991}
7992
7993
7994/**
7995 * @opcode 0xab
7996 */
7997FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7998{
7999 /*
8000 * Use the C implementation if a repeat prefix is encountered.
8001 */
8002 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8003 {
8004 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
8005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8006 switch (pVCpu->iem.s.enmEffOpSize)
8007 {
8008 case IEMMODE_16BIT:
8009 switch (pVCpu->iem.s.enmEffAddrMode)
8010 {
8011 case IEMMODE_16BIT:
8012 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8013 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8014 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8015 iemCImpl_stos_ax_m16);
8016 case IEMMODE_32BIT:
8017 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8018 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8019 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8020 iemCImpl_stos_ax_m32);
8021 case IEMMODE_64BIT:
8022 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8023 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8024 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8025 iemCImpl_stos_ax_m64);
8026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8027 }
8028 break;
8029 case IEMMODE_32BIT:
8030 switch (pVCpu->iem.s.enmEffAddrMode)
8031 {
8032 case IEMMODE_16BIT:
8033 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8036 iemCImpl_stos_eax_m16);
8037 case IEMMODE_32BIT:
8038 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8039 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8041 iemCImpl_stos_eax_m32);
8042 case IEMMODE_64BIT:
8043 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8044 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8045 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8046 iemCImpl_stos_eax_m64);
8047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8048 }
8049 case IEMMODE_64BIT:
8050 switch (pVCpu->iem.s.enmEffAddrMode)
8051 {
8052 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
8053 case IEMMODE_32BIT:
8054 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8055 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8056 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8057 iemCImpl_stos_rax_m32);
8058 case IEMMODE_64BIT:
8059 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
8060 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8061 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8062 iemCImpl_stos_rax_m64);
8063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8064 }
8065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8066 }
8067 }
8068
8069 /*
8070 * Annoying double switch here.
8071 * Using ugly macro for implementing the cases, sharing it with stosb.
8072 */
8073 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
8074 switch (pVCpu->iem.s.enmEffOpSize)
8075 {
8076 case IEMMODE_16BIT:
8077 switch (pVCpu->iem.s.enmEffAddrMode)
8078 {
8079 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8080 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8081 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
8082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8083 }
8084 break;
8085
8086 case IEMMODE_32BIT:
8087 switch (pVCpu->iem.s.enmEffAddrMode)
8088 {
8089 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8090 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8091 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
8092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8093 }
8094 break;
8095
8096 case IEMMODE_64BIT:
8097 switch (pVCpu->iem.s.enmEffAddrMode)
8098 {
8099 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8100 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
8101 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
8102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8103 }
8104 break;
8105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8106 }
8107}
8108
8109#undef IEM_STOS_CASE
8110
8111/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
8112#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
8113 IEM_MC_BEGIN(a_fMcFlags, 0); \
8114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8115 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
8116 IEM_MC_LOCAL(RTGCPTR, uAddr); \
8117 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
8118 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
8119 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
8120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
8121 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
8122 } IEM_MC_ELSE() { \
8123 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
8124 } IEM_MC_ENDIF(); \
8125 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8126 IEM_MC_END() \
8127
8128/**
8129 * @opcode 0xac
8130 * @opfltest df
8131 */
8132FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
8133{
8134 /*
8135 * Use the C implementation if a repeat prefix is encountered.
8136 */
8137 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8138 {
8139 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
8140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8141 switch (pVCpu->iem.s.enmEffAddrMode)
8142 {
8143 case IEMMODE_16BIT:
8144 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8145 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8147 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8148 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
8149 case IEMMODE_32BIT:
8150 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8151 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8152 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8154 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
8155 case IEMMODE_64BIT:
8156 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8160 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
8161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8162 }
8163 }
8164
8165 /*
8166 * Sharing case implementation with lods[wdq] below.
8167 */
8168 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
8169 switch (pVCpu->iem.s.enmEffAddrMode)
8170 {
8171 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8172 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8173 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
8174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8175 }
8176}
8177
8178
8179/**
8180 * @opcode 0xad
8181 * @opfltest df
8182 */
8183FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
8184{
8185 /*
8186 * Use the C implementation if a repeat prefix is encountered.
8187 */
8188 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8189 {
8190 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
8191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8192 switch (pVCpu->iem.s.enmEffOpSize)
8193 {
8194 case IEMMODE_16BIT:
8195 switch (pVCpu->iem.s.enmEffAddrMode)
8196 {
8197 case IEMMODE_16BIT:
8198 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8199 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8200 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8201 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8202 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
8203 case IEMMODE_32BIT:
8204 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8205 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8206 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8207 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8208 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
8209 case IEMMODE_64BIT:
8210 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8211 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8212 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8214 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
8215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8216 }
8217 break;
8218 case IEMMODE_32BIT:
8219 switch (pVCpu->iem.s.enmEffAddrMode)
8220 {
8221 case IEMMODE_16BIT:
8222 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8223 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8224 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8225 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8226 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
8227 case IEMMODE_32BIT:
8228 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8229 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8230 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8232 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
8233 case IEMMODE_64BIT:
8234 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8235 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8236 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8237 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8238 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
8239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8240 }
8241 case IEMMODE_64BIT:
8242 switch (pVCpu->iem.s.enmEffAddrMode)
8243 {
8244 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
8245 case IEMMODE_32BIT:
8246 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8247 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8248 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8249 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8250 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
8251 case IEMMODE_64BIT:
8252 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8253 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8254 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8255 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8256 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
8257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8258 }
8259 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8260 }
8261 }
8262
8263 /*
8264 * Annoying double switch here.
8265 * Using ugly macro for implementing the cases, sharing it with lodsb.
8266 */
8267 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
8268 switch (pVCpu->iem.s.enmEffOpSize)
8269 {
8270 case IEMMODE_16BIT:
8271 switch (pVCpu->iem.s.enmEffAddrMode)
8272 {
8273 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8274 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8275 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
8276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8277 }
8278 break;
8279
8280 case IEMMODE_32BIT:
8281 switch (pVCpu->iem.s.enmEffAddrMode)
8282 {
8283 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8284 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8285 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
8286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8287 }
8288 break;
8289
8290 case IEMMODE_64BIT:
8291 switch (pVCpu->iem.s.enmEffAddrMode)
8292 {
8293 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8294 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
8295 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
8296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8297 }
8298 break;
8299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8300 }
8301}
8302
8303#undef IEM_LODS_CASE
8304
8305/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
8306#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
8307 IEM_MC_BEGIN(a_fMcFlags, 0); \
8308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8309 \
8310 IEM_MC_LOCAL(RTGCPTR, uAddr); \
8311 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
8312 \
8313 IEM_MC_ARG(uint##ValBits##_t, uValue, 2); \
8314 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
8315 IEM_MC_ARG(uint##ValBits##_t *, puRax, 1); \
8316 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
8317 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8318 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puRax, uValue); \
8319 \
8320 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8321 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
8322 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
8323 } IEM_MC_ELSE() { \
8324 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
8325 } IEM_MC_ENDIF(); \
8326 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8327 IEM_MC_END();
8328
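/* A rough model of one non-REP SCAS iteration (sketch only, the helper
   names below are illustrative): the accumulator is compared against
   ES:[xDI] - the ES segment cannot be overridden - and only EFLAGS and
   xDI change, the accumulator itself is left untouched:
       eflags = cmp_update_flags(eflags, rax_part, read(es_base + di));
       di    += (eflags & X86_EFL_DF) ? -cb : +cb;
 */
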
8329/**
8330 * @opcode 0xae
8331 * @opflclass arithmetic
8332 * @opfltest df
8333 */
8334FNIEMOP_DEF(iemOp_scasb_AL_Xb)
8335{
8336 /*
8337 * Use the C implementation if a repeat prefix is encountered.
8338 */
8339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8340 {
8341 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
8342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8343 switch (pVCpu->iem.s.enmEffAddrMode)
8344 {
8345 case IEMMODE_16BIT:
8346 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8347 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8349 iemCImpl_repe_scas_al_m16);
8350 case IEMMODE_32BIT:
8351 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8352 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8354 iemCImpl_repe_scas_al_m32);
8355 case IEMMODE_64BIT:
8356 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8357 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8358 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8359 iemCImpl_repe_scas_al_m64);
8360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8361 }
8362 }
8363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8364 {
8365 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
8366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8367 switch (pVCpu->iem.s.enmEffAddrMode)
8368 {
8369 case IEMMODE_16BIT:
8370 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8371 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8372 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8373 iemCImpl_repne_scas_al_m16);
8374 case IEMMODE_32BIT:
8375 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8376 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8377 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8378 iemCImpl_repne_scas_al_m32);
8379 case IEMMODE_64BIT:
8380 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8382 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8383 iemCImpl_repne_scas_al_m64);
8384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8385 }
8386 }
8387
8388 /*
8389 * Sharing case implementation with scas[wdq] below.
8390 */
8391 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
8392 switch (pVCpu->iem.s.enmEffAddrMode)
8393 {
8394 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8395 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8396 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
8397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8398 }
8399}
8400
8401
8402/**
8403 * @opcode 0xaf
8404 * @opflclass arithmetic
8405 * @opfltest df
8406 */
8407FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8408{
8409 /*
8410 * Use the C implementation if a repeat prefix is encountered.
8411 */
8412 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8413 {
8414 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 switch (pVCpu->iem.s.enmEffOpSize)
8417 {
8418 case IEMMODE_16BIT:
8419 switch (pVCpu->iem.s.enmEffAddrMode)
8420 {
8421 case IEMMODE_16BIT:
8422 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8423 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8424 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8425 iemCImpl_repe_scas_ax_m16);
8426 case IEMMODE_32BIT:
8427 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8428 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8429 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8430 iemCImpl_repe_scas_ax_m32);
8431 case IEMMODE_64BIT:
8432 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8435 iemCImpl_repe_scas_ax_m64);
8436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8437 }
8438 break;
8439 case IEMMODE_32BIT:
8440 switch (pVCpu->iem.s.enmEffAddrMode)
8441 {
8442 case IEMMODE_16BIT:
8443 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8444 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8445 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8446 iemCImpl_repe_scas_eax_m16);
8447 case IEMMODE_32BIT:
8448 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8449 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8451 iemCImpl_repe_scas_eax_m32);
8452 case IEMMODE_64BIT:
8453 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8456 iemCImpl_repe_scas_eax_m64);
8457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8458 }
8459 case IEMMODE_64BIT:
8460 switch (pVCpu->iem.s.enmEffAddrMode)
8461 {
8462 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
8463 case IEMMODE_32BIT:
8464 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8465 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8466 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8467 iemCImpl_repe_scas_rax_m32);
8468 case IEMMODE_64BIT:
8469 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8470 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8471 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8472 iemCImpl_repe_scas_rax_m64);
8473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8474 }
8475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8476 }
8477 }
8478 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8479 {
8480 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8482 switch (pVCpu->iem.s.enmEffOpSize)
8483 {
8484 case IEMMODE_16BIT:
8485 switch (pVCpu->iem.s.enmEffAddrMode)
8486 {
8487 case IEMMODE_16BIT:
8488 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8491 iemCImpl_repne_scas_ax_m16);
8492 case IEMMODE_32BIT:
8493 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8494 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8496 iemCImpl_repne_scas_ax_m32);
8497 case IEMMODE_64BIT:
8498 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8499 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8500 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8501 iemCImpl_repne_scas_ax_m64);
8502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8503 }
8504 break;
8505 case IEMMODE_32BIT:
8506 switch (pVCpu->iem.s.enmEffAddrMode)
8507 {
8508 case IEMMODE_16BIT:
8509 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8511 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8512 iemCImpl_repne_scas_eax_m16);
8513 case IEMMODE_32BIT:
8514 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8515 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8517 iemCImpl_repne_scas_eax_m32);
8518 case IEMMODE_64BIT:
8519 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8520 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8521 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8522 iemCImpl_repne_scas_eax_m64);
8523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8524 }
8525 case IEMMODE_64BIT:
8526 switch (pVCpu->iem.s.enmEffAddrMode)
8527 {
8528 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8529 case IEMMODE_32BIT:
8530 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8532 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8533 iemCImpl_repne_scas_rax_m32);
8534 case IEMMODE_64BIT:
8535 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8538 iemCImpl_repne_scas_rax_m64);
8539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8540 }
8541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8542 }
8543 }
8544
8545 /*
8546 * Annoying double switch here.
8547 * Using ugly macro for implementing the cases, sharing it with scasb.
8548 */
8549 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8550 switch (pVCpu->iem.s.enmEffOpSize)
8551 {
8552 case IEMMODE_16BIT:
8553 switch (pVCpu->iem.s.enmEffAddrMode)
8554 {
8555 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8556 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8557 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8559 }
8560 break;
8561
8562 case IEMMODE_32BIT:
8563 switch (pVCpu->iem.s.enmEffAddrMode)
8564 {
8565 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8566 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8567 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8569 }
8570 break;
8571
8572 case IEMMODE_64BIT:
8573 switch (pVCpu->iem.s.enmEffAddrMode)
8574 {
8575 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8576 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8577 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8579 }
8580 break;
8581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8582 }
8583}
8584
8585#undef IEM_SCAS_CASE
8586
8587/**
8588 * Common 'mov r8, imm8' helper.
8589 */
8590FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8591{
8592 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8593 IEM_MC_BEGIN(0, 0);
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8596 IEM_MC_ADVANCE_RIP_AND_FINISH();
8597 IEM_MC_END();
8598}
8599
8600
8601/**
8602 * @opcode 0xb0
8603 */
8604FNIEMOP_DEF(iemOp_mov_AL_Ib)
8605{
8606 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8607 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8608}
8609
8610
8611/**
8612 * @opcode 0xb1
8613 */
8614FNIEMOP_DEF(iemOp_CL_Ib)
8615{
8616 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8617 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8618}
8619
8620
8621/**
8622 * @opcode 0xb2
8623 */
8624FNIEMOP_DEF(iemOp_DL_Ib)
8625{
8626 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8627 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8628}
8629
8630
8631/**
8632 * @opcode 0xb3
8633 */
8634FNIEMOP_DEF(iemOp_BL_Ib)
8635{
8636 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8637 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8638}
8639
8640
8641/**
8642 * @opcode 0xb4
8643 */
8644FNIEMOP_DEF(iemOp_mov_AH_Ib)
8645{
8646 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8647 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8648}
8649
8650
8651/**
8652 * @opcode 0xb5
8653 */
8654FNIEMOP_DEF(iemOp_CH_Ib)
8655{
8656 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8657 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8658}
8659
8660
8661/**
8662 * @opcode 0xb6
8663 */
8664FNIEMOP_DEF(iemOp_DH_Ib)
8665{
8666 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8667 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8668}
8669
8670
8671/**
8672 * @opcode 0xb7
8673 */
8674FNIEMOP_DEF(iemOp_BH_Ib)
8675{
8676 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8677 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8678}
8679
8680
8681/**
8682 * Common 'mov regX,immX' helper.
8683 */
8684FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8685{
8686 switch (pVCpu->iem.s.enmEffOpSize)
8687 {
8688 case IEMMODE_16BIT:
8689 IEM_MC_BEGIN(0, 0);
8690 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8692 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8693 IEM_MC_ADVANCE_RIP_AND_FINISH();
8694 IEM_MC_END();
8695 break;
8696
8697 case IEMMODE_32BIT:
8698 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8699 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8702 IEM_MC_ADVANCE_RIP_AND_FINISH();
8703 IEM_MC_END();
8704 break;
8705
8706 case IEMMODE_64BIT:
8707 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
8708 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8710 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8711 IEM_MC_ADVANCE_RIP_AND_FINISH();
8712 IEM_MC_END();
8713 break;
8714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8715 }
8716}
8717
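/* Worth noting: B8+r with REX.W is the only MOV form carrying a full 8-byte
   immediate. For example, the bytes 48 B8 88 77 66 55 44 33 22 11 decode as
   'mov rax, 0x1122334455667788'; with a 66h operand-size prefix the same
   opcode shrinks to the 16-bit 'mov ax, Iw' form instead. */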
8718
8719/**
8720 * @opcode 0xb8
8721 */
8722FNIEMOP_DEF(iemOp_eAX_Iv)
8723{
8724 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8725 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8726}
8727
8728
8729/**
8730 * @opcode 0xb9
8731 */
8732FNIEMOP_DEF(iemOp_eCX_Iv)
8733{
8734 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8735 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8736}
8737
8738
8739/**
8740 * @opcode 0xba
8741 */
8742FNIEMOP_DEF(iemOp_eDX_Iv)
8743{
8744 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8745 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8746}
8747
8748
8749/**
8750 * @opcode 0xbb
8751 */
8752FNIEMOP_DEF(iemOp_eBX_Iv)
8753{
8754 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8755 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8756}
8757
8758
8759/**
8760 * @opcode 0xbc
8761 */
8762FNIEMOP_DEF(iemOp_eSP_Iv)
8763{
8764 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8765 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8766}
8767
8768
8769/**
8770 * @opcode 0xbd
8771 */
8772FNIEMOP_DEF(iemOp_eBP_Iv)
8773{
8774 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8775 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8776}
8777
8778
8779/**
8780 * @opcode 0xbe
8781 */
8782FNIEMOP_DEF(iemOp_eSI_Iv)
8783{
8784 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8785 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8786}
8787
8788
8789/**
8790 * @opcode 0xbf
8791 */
8792FNIEMOP_DEF(iemOp_eDI_Iv)
8793{
8794 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8795 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8796}
8797
8798
8799/**
8800 * @opcode 0xc0
8801 */
8802FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8803{
8804 IEMOP_HLP_MIN_186();
8805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8806
8807 /* Need to use a body macro here since the EFLAGS behaviour differs between
8808 the shifts, rotates and rotate w/ carry. Sigh. */
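/* Concretely: SHL/SHR/SAR update SF/ZF/PF and CF (OF is only defined for
   1-bit shifts, AF is undefined), ROL/ROR leave SF/ZF/PF/AF alone and only
   touch CF and OF, while RCL/RCR additionally consume CF as an input bit -
   hence a separate flags-aware worker per instruction. */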
8809#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8810 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8811 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8812 { \
8813 /* register */ \
8814 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8815 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8817 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
8818 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8819 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8820 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8821 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
8822 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8823 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8824 IEM_MC_END(); \
8825 } \
8826 else \
8827 { \
8828 /* memory */ \
8829 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8832 \
8833 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8835 \
8836 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8837 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
8838 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8839 \
8840 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8841 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8842 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
8843 \
8844 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8845 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8847 IEM_MC_END(); \
8848 } (void)0
8849
8850 switch (IEM_GET_MODRM_REG_8(bRm))
8851 {
8852 /**
8853 * @opdone
8854 * @opmaps grp2_c0
8855 * @opcode /0
8856 * @opflclass rotate_count
8857 */
8858 case 0:
8859 {
8860 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8862 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8863 break;
8864 }
8865 /**
8866 * @opdone
8867 * @opmaps grp2_c0
8868 * @opcode /1
8869 * @opflclass rotate_count
8870 */
8871 case 1:
8872 {
8873 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8875 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8876 break;
8877 }
8878 /**
8879 * @opdone
8880 * @opmaps grp2_c0
8881 * @opcode /2
8882 * @opflclass rotate_carry_count
8883 */
8884 case 2:
8885 {
8886 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8887 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8888 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8889 break;
8890 }
8891 /**
8892 * @opdone
8893 * @opmaps grp2_c0
8894 * @opcode /3
8895 * @opflclass rotate_carry_count
8896 */
8897 case 3:
8898 {
8899 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8901 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8902 break;
8903 }
8904 /**
8905 * @opdone
8906 * @opmaps grp2_c0
8907 * @opcode /4
8908 * @opflclass shift_count
8909 */
8910 case 4:
8911 {
8912 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8914 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8915 break;
8916 }
8917 /**
8918 * @opdone
8919 * @opmaps grp2_c0
8920 * @opcode /5
8921 * @opflclass shift_count
8922 */
8923 case 5:
8924 {
8925 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8927 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8928 break;
8929 }
8930 /**
8931 * @opdone
8932 * @opmaps grp2_c0
8933 * @opcode /7
8934 * @opflclass shift_count
8935 */
8936 case 7:
8937 {
8938 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8940 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8941 break;
8942 }
8943
8944 /** @opdone */
8945 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8946 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
8947 }
8948#undef GRP2_BODY_Eb_Ib
8949}
8950
8951
8952/* Need to use a body macro here since the EFLAGS behaviour differs between
8953 the shifts, rotates and rotate w/ carry. Sigh. */
8954#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
8955 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8956 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8957 { \
8958 /* register */ \
8959 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8960 switch (pVCpu->iem.s.enmEffOpSize) \
8961 { \
8962 case IEMMODE_16BIT: \
8963 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8965 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8966 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8967 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8968 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8969 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
8970 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8971 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8972 IEM_MC_END(); \
8973 break; \
8974 \
8975 case IEMMODE_32BIT: \
8976 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8978 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8979 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8980 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8981 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8982 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
8983 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8984 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8985 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8986 IEM_MC_END(); \
8987 break; \
8988 \
8989 case IEMMODE_64BIT: \
8990 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8992 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8993 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8994 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8995 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8996 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
8997 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8998 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8999 IEM_MC_END(); \
9000 break; \
9001 \
9002 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9003 } \
9004 } \
9005 else \
9006 { \
9007 /* memory */ \
9008 switch (pVCpu->iem.s.enmEffOpSize) \
9009 { \
9010 case IEMMODE_16BIT: \
9011 IEM_MC_BEGIN(0, 0); \
9012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9014 \
9015 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9017 \
9018 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9019 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9020 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9021 \
9022 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9023 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
9024 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
9025 \
9026 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9027 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9028 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9029 IEM_MC_END(); \
9030 break; \
9031 \
9032 case IEMMODE_32BIT: \
9033 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9036 \
9037 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9039 \
9040 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9041 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9042 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9043 \
9044 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9045 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
9046 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
9047 \
9048 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9049 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9050 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9051 IEM_MC_END(); \
9052 break; \
9053 \
9054 case IEMMODE_64BIT: \
9055 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9058 \
9059 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9061 \
9062 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9063 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9064 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9065 \
9066 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9067 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
9068 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
9069 \
9070 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9071 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9072 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9073 IEM_MC_END(); \
9074 break; \
9075 \
9076 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9077 } \
9078 } (void)0
9079
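/* Note: the IEM_MC_CLEAR_HIGH_GREG_U64 in the 32-bit register path above
   mirrors the architectural rule that a 32-bit GPR write in 64-bit mode
   zeroes bits 63:32 of the full register; 16-bit writes leave them alone. */
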
9080/**
9081 * @opmaps grp2_c1
9082 * @opcode /0
9083 * @opflclass rotate_count
9084 */
9085FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
9086{
9087 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9088 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9089}
9090
9091
9092/**
9093 * @opmaps grp2_c1
9094 * @opcode /1
9095 * @opflclass rotate_count
9096 */
9097FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
9098{
9099 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9100 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9101}
9102
9103
9104/**
9105 * @opmaps grp2_c1
9106 * @opcode /2
9107 * @opflclass rotate_carry_count
9108 */
9109FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
9110{
9111 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9112 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9113}
9114
9115
9116/**
9117 * @opmaps grp2_c1
9118 * @opcode /3
9119 * @opflclass rotate_carry_count
9120 */
9121FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
9122{
9123 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9124 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9125}
9126
9127
9128/**
9129 * @opmaps grp2_c1
9130 * @opcode /4
9131 * @opflclass shift_count
9132 */
9133FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
9134{
9135 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9136 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9137}
9138
9139
9140/**
9141 * @opmaps grp2_c1
9142 * @opcode /5
9143 * @opflclass shift_count
9144 */
9145FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
9146{
9147 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9148 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9149}
9150
9151
9152/**
9153 * @opmaps grp2_c1
9154 * @opcode /7
9155 * @opflclass shift_count
9156 */
9157FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
9158{
9159 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9160 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9161}
9162
9163#undef GRP2_BODY_Ev_Ib
9164
9165/**
9166 * @opcode 0xc1
9167 */
9168FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
9169{
9170 IEMOP_HLP_MIN_186();
9171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9172
9173 switch (IEM_GET_MODRM_REG_8(bRm))
9174 {
9175 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
9176 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
9177 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
9178 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
9179 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
9180 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
9181 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
9182 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9183 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
9184 }
9185}
9186
9187
9188/**
9189 * @opcode 0xc2
9190 */
9191FNIEMOP_DEF(iemOp_retn_Iw)
9192{
9193 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
9194 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9195 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9197 switch (pVCpu->iem.s.enmEffOpSize)
9198 {
9199 case IEMMODE_16BIT:
9200 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9201 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
9202 case IEMMODE_32BIT:
9203 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9204 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
9205 case IEMMODE_64BIT:
9206 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9207 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
9208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9209 }
9210}
9211
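/* A rough model of RETN Iw (sketch only; the real stack handling, including
   limit and canonical checks, lives in iemCImpl_retn_iw_*):
       rip  = pop();      // pop the return address at the current op size
       rsp += u16Imm;     // then release the callee-cleaned parameter bytes
 */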
9212
9213/**
9214 * @opcode 0xc3
9215 */
9216FNIEMOP_DEF(iemOp_retn)
9217{
9218 IEMOP_MNEMONIC(retn, "retn");
9219 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9221 switch (pVCpu->iem.s.enmEffOpSize)
9222 {
9223 case IEMMODE_16BIT:
9224 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9225 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
9226 case IEMMODE_32BIT:
9227 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9228 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
9229 case IEMMODE_64BIT:
9230 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9231 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
9232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9233 }
9234}
9235
9236
9237/**
9238 * @opcode 0xc4
9239 */
9240FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
9241{
9242 /* The LES instruction is invalid in 64-bit mode. In legacy and
9243 compatibility mode it is invalid with MOD=3.
9244 The use as a VEX prefix is made possible by assigning the inverted
9245 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
9246 outside of 64-bit mode. VEX is not available in real or v86 mode. */
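/* For orientation, the standard VEX3 byte layout (per the Intel/AMD manuals)
   that the code below picks apart:
     byte 1 (bRm here):  ~R.~X.~B.m-mmmm - inverted REX bits + opcode map
     byte 2 (bVex2):     W.~vvvv.L.pp    - ~vvvv = inverted extra register */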
9247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9248 if ( IEM_IS_64BIT_CODE(pVCpu)
9249 || IEM_IS_MODRM_REG_MODE(bRm) )
9250 {
9251 IEMOP_MNEMONIC(vex3_prefix, "vex3");
9252 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9253 {
9254 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9255 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9256 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
9257 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9258 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9259#if 1
9260 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
9261 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
9262#else
9263 if (bVex2 & 0x80 /* VEX.W */)
9264 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
9265#endif
9266 if (IEM_IS_64BIT_CODE(pVCpu))
9267 {
9268#if 1
9269 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
9270 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
9271#else
9272 if (~bRm & 0x20 /* VEX.~B */)
9273 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
9274 if (~bRm & 0x40 /* VEX.~X */)
9275 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
9276 if (~bRm & 0x80 /* VEX.~R */)
9277 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
9278#endif
9279 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9280 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
9281 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
9282 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
9283 }
9284 else
9285 {
9286 pVCpu->iem.s.uRexReg = 0;
9287 pVCpu->iem.s.uRexIndex = 0;
9288 pVCpu->iem.s.uRexB = 0;
9289 /** @todo testcase: Will attempts to access registers 8 thru 15 from 16&32 bit
9290 * code raise \#UD or just be ignored? We're ignoring for now... */
9291 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
9292 }
9293 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
9294 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
9295
9296 switch (bRm & 0x1f)
9297 {
9298 case 1: /* 0x0f lead opcode byte. */
9299#ifdef IEM_WITH_VEX
9300 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9301#else
9302 IEMOP_BITCH_ABOUT_STUB();
9303 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9304#endif
9305
9306 case 2: /* 0x0f 0x38 lead opcode bytes. */
9307#ifdef IEM_WITH_VEX
9308 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9309#else
9310 IEMOP_BITCH_ABOUT_STUB();
9311 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9312#endif
9313
9314 case 3: /* 0x0f 0x3a lead opcode bytes. */
9315#ifdef IEM_WITH_VEX
9316 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9317#else
9318 IEMOP_BITCH_ABOUT_STUB();
9319 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9320#endif
9321
9322 default:
9323 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
9324 IEMOP_RAISE_INVALID_OPCODE_RET();
9325 }
9326 }
9327 Log(("VEX3: VEX support disabled!\n"));
9328 IEMOP_RAISE_INVALID_OPCODE_RET();
9329 }
9330
9331 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
9332 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
9333}
9334
9335
9336/**
9337 * @opcode 0xc5
9338 */
9339FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9340{
9341 /* The LDS instruction is invalid in 64-bit mode. In legacy and
9342 compatibility mode it is invalid with MOD=3.
9343 The use as a VEX prefix is made possible by assigning the inverted
9344 REX.R to the top MOD bit, and the top bit in the inverted register
9345 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9346 code to accessing registers 0..7 in this VEX form. */
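/* For orientation, the standard VEX2 byte layout decoded below:
     byte 1 (bRm here): ~R.~vvvv.L.pp - the opcode map is implicitly 0x0f
     and VEX.W/X/B are implicitly zero. */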
9347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9348 if ( IEM_IS_64BIT_CODE(pVCpu)
9349 || IEM_IS_MODRM_REG_MODE(bRm))
9350 {
9351 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9352 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9353 {
9354 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9355 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9356 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9357 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9358 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9359 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9360 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9361 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9362 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9363 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9364
9365#ifdef IEM_WITH_VEX
9366 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9367#else
9368 IEMOP_BITCH_ABOUT_STUB();
9369 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9370#endif
9371 }
9372
9373 /** @todo does Intel completely decode the sequence with SIB/disp before \#UD? */
9374 Log(("VEX2: VEX support disabled!\n"));
9375 IEMOP_RAISE_INVALID_OPCODE_RET();
9376 }
9377
9378 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9379 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9380}
9381
9382
9383/**
9384 * @opcode 0xc6
9385 */
9386FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9387{
9388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9389 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9390 IEMOP_RAISE_INVALID_OPCODE_RET();
9391 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9392
9393 if (IEM_IS_MODRM_REG_MODE(bRm))
9394 {
9395 /* register access */
9396 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9397 IEM_MC_BEGIN(0, 0);
9398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9399 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9400 IEM_MC_ADVANCE_RIP_AND_FINISH();
9401 IEM_MC_END();
9402 }
9403 else
9404 {
9405 /* memory access. */
9406 IEM_MC_BEGIN(0, 0);
9407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9409 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9412 IEM_MC_ADVANCE_RIP_AND_FINISH();
9413 IEM_MC_END();
9414 }
9415}
9416
9417
9418/**
9419 * @opcode 0xc7
9420 */
9421FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9422{
9423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9424 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
9425 IEMOP_RAISE_INVALID_OPCODE_RET();
9426 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9427
9428 if (IEM_IS_MODRM_REG_MODE(bRm))
9429 {
9430 /* register access */
9431 switch (pVCpu->iem.s.enmEffOpSize)
9432 {
9433 case IEMMODE_16BIT:
9434 IEM_MC_BEGIN(0, 0);
9435 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9437 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9438 IEM_MC_ADVANCE_RIP_AND_FINISH();
9439 IEM_MC_END();
9440 break;
9441
9442 case IEMMODE_32BIT:
9443 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9444 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9446 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9447 IEM_MC_ADVANCE_RIP_AND_FINISH();
9448 IEM_MC_END();
9449 break;
9450
9451 case IEMMODE_64BIT:
9452 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9453 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9455 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9456 IEM_MC_ADVANCE_RIP_AND_FINISH();
9457 IEM_MC_END();
9458 break;
9459
9460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9461 }
9462 }
9463 else
9464 {
9465 /* memory access. */
9466 switch (pVCpu->iem.s.enmEffOpSize)
9467 {
9468 case IEMMODE_16BIT:
9469 IEM_MC_BEGIN(0, 0);
9470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9474 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9475 IEM_MC_ADVANCE_RIP_AND_FINISH();
9476 IEM_MC_END();
9477 break;
9478
9479 case IEMMODE_32BIT:
9480 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9483 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9485 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9486 IEM_MC_ADVANCE_RIP_AND_FINISH();
9487 IEM_MC_END();
9488 break;
9489
9490 case IEMMODE_64BIT:
9491 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9494 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9497 IEM_MC_ADVANCE_RIP_AND_FINISH();
9498 IEM_MC_END();
9499 break;
9500
9501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9502 }
9503 }
9504}
9505
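/* Contrast with B8+r earlier: C7 /0 with REX.W only carries a sign-extended
   32-bit immediate (hence the S32_SX_U64 fetches above); there is no
   64-bit-immediate form of mov Ev,Iz. */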
9506
9507
9508
9509/**
9510 * @opcode 0xc8
9511 */
9512FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9513{
9514 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9515 IEMOP_HLP_MIN_186();
9516 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9517 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9518 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9520 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9521 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9522 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9523 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9524}
9525
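/* A rough model of ENTER cbFrame,0 - the common, non-nested case (sketch
   only; the full semantics, including nesting levels, live in iemCImpl_enter):
       push(rbp);         // save the caller's frame pointer
       rbp  = rsp;        // establish the new frame
       rsp -= cbFrame;    // reserve the local variable area
 */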
9526
9527/**
9528 * @opcode 0xc9
9529 */
9530FNIEMOP_DEF(iemOp_leave)
9531{
9532 IEMOP_MNEMONIC(leave, "leave");
9533 IEMOP_HLP_MIN_186();
9534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9536 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9539 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9540}
9541
9542
9543/**
9544 * @opcode 0xca
9545 */
9546FNIEMOP_DEF(iemOp_retf_Iw)
9547{
9548 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9549 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9551 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9552 | IEM_CIMPL_F_MODE,
9553 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9554 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9555 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9556 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9557 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9558 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9559 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9560 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9561 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9562 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9563 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9564 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9565 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9566 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9567 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9568 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9569 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9570 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9571}
9572
9573
9574/**
9575 * @opcode 0xcb
9576 */
9577FNIEMOP_DEF(iemOp_retf)
9578{
9579 IEMOP_MNEMONIC(retf, "retf");
9580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9581 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9582 | IEM_CIMPL_F_MODE,
9583 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9584 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9585 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9586 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9587 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9588 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9589 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9590 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9591 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9592 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9593 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9594 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9595 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9596 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9597 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9598 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9599 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9600 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9601}
9602
9603
9604/**
9605 * @opcode 0xcc
9606 */
9607FNIEMOP_DEF(iemOp_int3)
9608{
9609 IEMOP_MNEMONIC(int3, "int3");
9610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9611 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9612 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9613 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9614}
9615
9616
9617/**
9618 * @opcode 0xcd
9619 */
9620FNIEMOP_DEF(iemOp_int_Ib)
9621{
9622 IEMOP_MNEMONIC(int_Ib, "int Ib");
9623 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9625 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9626 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9627 iemCImpl_int, u8Int, IEMINT_INTN);
9628 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9629}
9630
9631
9632/**
9633 * @opcode 0xce
9634 */
9635FNIEMOP_DEF(iemOp_into)
9636{
9637 IEMOP_MNEMONIC(into, "into");
9638 IEMOP_HLP_NO_64BIT();
9639 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9640 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9641 UINT64_MAX,
9642 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9643 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9644}
9645
9646
9647/**
9648 * @opcode 0xcf
9649 */
9650FNIEMOP_DEF(iemOp_iret)
9651{
9652 IEMOP_MNEMONIC(iret, "iret");
9653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9654 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9655 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9656 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9657 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9658 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9659 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9660 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9661 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9662 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9663 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9664 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9665 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9666 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9667 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9668 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9669 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9670 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9671 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9672 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9673 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9674 /* Segment registers are sanitized when returning to an outer ring, or fully
9675 reloaded when returning to v86 mode. Thus the large flush list above. */
9676}
9677
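/* Rough pop order for IRET (sketch; iemCImpl_iret deals with the ring, mode
   and task-switch details): RIP, then CS, then RFLAGS; when returning to an
   outer ring - and always in 64-bit mode - RSP and SS are popped as well. */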
9678
9679/**
9680 * @opcode 0xd0
9681 */
9682FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9683{
9684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9685
9686 /* Need to use a body macro here since the EFLAGS behaviour differs between
9687 the shifts, rotates and rotate w/ carry. Sigh. */
9688#define GRP2_BODY_Eb_1(a_pImplExpr) \
9689 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9690 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9691 { \
9692 /* register */ \
9693 IEM_MC_BEGIN(0, 0); \
9694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9695 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
9696 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9697 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9698 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 2); \
9699 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
9700 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9701 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9702 IEM_MC_END(); \
9703 } \
9704 else \
9705 { \
9706 /* memory */ \
9707 IEM_MC_BEGIN(0, 0); \
9708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9711 \
9712 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9713 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
9714 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9715 \
9716 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9717 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 2); \
9718 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
9719 \
9720 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9721 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9723 IEM_MC_END(); \
9724 } (void)0
9725
9726 switch (IEM_GET_MODRM_REG_8(bRm))
9727 {
9728 /**
9729 * @opdone
9730 * @opmaps grp2_d0
9731 * @opcode /0
9732 * @opflclass rotate_1
9733 */
9734 case 0:
9735 {
9736 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9737 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9738 break;
9739 }
9740 /**
9741 * @opdone
9742 * @opmaps grp2_d0
9743 * @opcode /1
9744 * @opflclass rotate_1
9745 */
9746 case 1:
9747 {
9748 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9749 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9750 break;
9751 }
9752 /**
9753 * @opdone
9754 * @opmaps grp2_d0
9755 * @opcode /2
9756 * @opflclass rotate_carry_1
9757 */
9758 case 2:
9759 {
9760 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9761 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9762 break;
9763 }
9764 /**
9765 * @opdone
9766 * @opmaps grp2_d0
9767 * @opcode /3
9768 * @opflclass rotate_carry_1
9769 */
9770 case 3:
9771 {
9772 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9773 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9774 break;
9775 }
9776 /**
9777 * @opdone
9778 * @opmaps grp2_d0
9779 * @opcode /4
9780 * @opflclass shift_1
9781 */
9782 case 4:
9783 {
9784 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9785 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9786 break;
9787 }
9788 /**
9789 * @opdone
9790 * @opmaps grp2_d0
9791 * @opcode /5
9792 * @opflclass shift_1
9793 */
9794 case 5:
9795 {
9796 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9797 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9798 break;
9799 }
9800 /**
9801 * @opdone
9802 * @opmaps grp2_d0
9803 * @opcode /7
9804 * @opflclass shift_1
9805 */
9806 case 7:
9807 {
9808 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9809 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9810 break;
9811 }
9812 /** @opdone */
9813 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9814 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9815 }
9816#undef GRP2_BODY_Eb_1
9817}
9818
9819
9820/* Need to use a body macro here since the EFLAGS behaviour differs between
9821 the shifts, rotates and rotate w/ carry. Sigh. */
9822#define GRP2_BODY_Ev_1(a_pImplExpr) \
9823 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9824 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9825 { \
9826 /* register */ \
9827 switch (pVCpu->iem.s.enmEffOpSize) \
9828 { \
9829 case IEMMODE_16BIT: \
9830 IEM_MC_BEGIN(0, 0); \
9831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9832 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9833 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9834 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9835 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9836 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
9837 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9838 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9839 IEM_MC_END(); \
9840 break; \
9841 \
9842 case IEMMODE_32BIT: \
9843 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9845 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9846 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9847 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9848 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9849 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
9850 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9851 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9852 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9853 IEM_MC_END(); \
9854 break; \
9855 \
9856 case IEMMODE_64BIT: \
9857 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9859 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9860 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9861 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9862 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9863 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
9864 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9865 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9866 IEM_MC_END(); \
9867 break; \
9868 \
9869 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9870 } \
9871 } \
9872 else \
9873 { \
9874 /* memory */ \
9875 switch (pVCpu->iem.s.enmEffOpSize) \
9876 { \
9877 case IEMMODE_16BIT: \
9878 IEM_MC_BEGIN(0, 0); \
9879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9882 \
9883 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9884 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9885 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9886 \
9887 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9888 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9889 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
9890 \
9891 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9892 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9894 IEM_MC_END(); \
9895 break; \
9896 \
9897 case IEMMODE_32BIT: \
9898 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9902 \
9903 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9904 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9905 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9906 \
9907 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9908 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9909 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
9910 \
9911 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9912 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9913 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9914 IEM_MC_END(); \
9915 break; \
9916 \
9917 case IEMMODE_64BIT: \
9918 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9922 \
9923 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9924 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9925 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9926 \
9927 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9928 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
9929 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
9930 \
9931 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9932 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9934 IEM_MC_END(); \
9935 break; \
9936 \
9937 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9938 } \
9939 } (void)0
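
/* For reference, the by-1 forms have fully defined OF behaviour, which is
   part of why each instruction needs its own EFLAGS helper. Roughly:
        SHL r/m,1: CF = old MSB, OF = CF ^ new MSB
        SHR r/m,1: CF = old bit 0, OF = old MSB
        SAR r/m,1: CF = old bit 0, OF = 0
   The rotates likewise derive OF from the carry and the top result bits. */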
9940
9941/**
9942 * @opmaps grp2_d1
9943 * @opcode /0
9944 * @opflclass rotate_1
9945 */
9946FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9947{
9948 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9949 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9950}
9951
9952
9953/**
9954 * @opmaps grp2_d1
9955 * @opcode /1
9956 * @opflclass rotate_1
9957 */
9958FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9959{
9960 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9961 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9962}
9963
9964
9965/**
9966 * @opmaps grp2_d1
9967 * @opcode /2
9968 * @opflclass rotate_carry_1
9969 */
9970FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9971{
9972 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9973 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9974}
9975
9976
9977/**
9978 * @opmaps grp2_d1
9979 * @opcode /3
9980 * @opflclass rotate_carry_1
9981 */
9982FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9983{
9984 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9985 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9986}
9987
9988
9989/**
9990 * @opmaps grp2_d1
9991 * @opcode /4
9992 * @opflclass shift_1
9993 */
9994FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9995{
9996 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9997 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9998}
9999
10000
10001/**
10002 * @opmaps grp2_d1
10003 * @opcode /5
10004 * @opflclass shift_1
10005 */
10006FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
10007{
10008 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
10009 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10010}
10011
10012
10013/**
10014 * @opmaps grp2_d1
10015 * @opcode /7
10016 * @opflclass shift_1
10017 */
10018FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
10019{
10020 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
10021 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10022}
10023
10024#undef GRP2_BODY_Ev_1
10025
10026/**
10027 * @opcode 0xd1
10028 */
10029FNIEMOP_DEF(iemOp_Grp2_Ev_1)
10030{
10031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10032 switch (IEM_GET_MODRM_REG_8(bRm))
10033 {
10034 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
10035 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
10036 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
10037 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
10038 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
10039 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
10040 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
10041 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10042 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10043 }
10044}
10045
10046
10047/**
10048 * @opcode 0xd2
10049 */
10050FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
10051{
10052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10053
10054 /* Need to use a body macro here since the EFLAGS behaviour differs between
10055 the shifts, rotates and rotate w/ carry. Sigh. */
10056#define GRP2_BODY_Eb_CL(a_pImplExpr) \
10057 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10058 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10059 { \
10060 /* register */ \
10061 IEM_MC_BEGIN(0, 0); \
10062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10063 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10064 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10065 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
10066 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10067 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10068 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
10069 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10070 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10071 IEM_MC_END(); \
10072 } \
10073 else \
10074 { \
10075 /* memory */ \
10076 IEM_MC_BEGIN(0, 0); \
10077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10080 \
10081 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10082 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
10083 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10084 \
10085 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10086 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10087 \
10088 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10089 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
10090 \
10091 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10092 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10094 IEM_MC_END(); \
10095 } (void)0
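
    /* Note that the architectural masking of the count (CL & 0x1f for 8, 16
       and 32-bit operands, CL & 0x3f for 64-bit ones, with RCL/RCR further
       reducing the masked 8/16-bit count modulo 9/17) is left entirely to
       the assembly helpers. */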
10096
10097 switch (IEM_GET_MODRM_REG_8(bRm))
10098 {
10099 /**
10100 * @opdone
10101 * @opmaps grp2_d2
10102 * @opcode /0
10103 * @opflclass rotate_count
10104 */
10105 case 0:
10106 {
10107 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10108 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
10109 break;
10110 }
10111 /**
10112 * @opdone
10113 * @opmaps grp2_d2
10114 * @opcode /1
10115 * @opflclass rotate_count
10116 */
10117 case 1:
10118 {
10119 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10120 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
10121 break;
10122 }
10123 /**
10124 * @opdone
10125 * @opmaps grp2_d2
10126 * @opcode /2
10127 * @opflclass rotate_carry_count
10128 */
10129 case 2:
10130 {
10131 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10132 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
10133 break;
10134 }
10135 /**
10136 * @opdone
10137 * @opmaps grp2_d2
10138 * @opcode /3
10139 * @opflclass rotate_carry_count
10140 */
10141 case 3:
10142 {
10143 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10144 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
10145 break;
10146 }
10147 /**
10148 * @opdone
10149 * @opmaps grp2_d2
10150 * @opcode /4
10151 * @opflclass shift_count
10152 */
10153 case 4:
10154 {
10155 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10156 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
10157 break;
10158 }
10159 /**
10160 * @opdone
10161 * @opmaps grp2_d2
10162 * @opcode /5
10163 * @opflclass shift_count
10164 */
10165 case 5:
10166 {
10167 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10168 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10169 break;
10170 }
10171 /**
10172 * @opdone
10173 * @opmaps grp2_d2
10174 * @opcode /7
10175 * @opflclass shift_count
10176 */
10177 case 7:
10178 {
10179 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10180 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10181 break;
10182 }
10183 /** @opdone */
10184 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10185 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10186 }
10187#undef GRP2_BODY_Eb_CL
10188}
10189
10190
10191/* Need to use a body macro here since the EFLAGS behaviour differs between
10192 the shifts, rotates and rotate w/ carry. Sigh. */
10193#define GRP2_BODY_Ev_CL(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
10194 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10195 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10196 { \
10197 /* register */ \
10198 switch (pVCpu->iem.s.enmEffOpSize) \
10199 { \
10200 case IEMMODE_16BIT: \
10201 IEM_MC_BEGIN(0, 0); \
10202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10203 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10204 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10205 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10206 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10207 IEM_MC_LOCAL(uint16_t, u16Dst); \
10208 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10209 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10210 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u16Dst, cShiftArg, fEFlags, 16); \
10211 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
10212 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10213 } IEM_MC_NATIVE_ELSE() { \
10214 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10215 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10216 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10217 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10218 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
10219 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10220 } IEM_MC_NATIVE_ENDIF(); \
10221 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10222 IEM_MC_END(); \
10223 break; \
10224 \
10225 case IEMMODE_32BIT: \
10226 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10228 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10229 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10230 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10231 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10232 IEM_MC_LOCAL(uint32_t, u32Dst); \
10233 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10234 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10235 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u32Dst, cShiftArg, fEFlags, 32); \
10236 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
10237 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10238 } IEM_MC_NATIVE_ELSE() { \
10239 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10240 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10241 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10242 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10243 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
10244 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10245 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10246 } IEM_MC_NATIVE_ENDIF(); \
10247 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10248 IEM_MC_END(); \
10249 break; \
10250 \
10251 case IEMMODE_64BIT: \
10252 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10254 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10255 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10256 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10257 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10258 IEM_MC_LOCAL(uint64_t, u64Dst); \
10259 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10260 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10261 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u64Dst, cShiftArg, fEFlags, 64); \
10262 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
10263 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10264 } IEM_MC_NATIVE_ELSE() { \
10265 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10266 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10267 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10268 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10269 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
10270 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10271 } IEM_MC_NATIVE_ENDIF(); \
10272 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10273 IEM_MC_END(); \
10274 break; \
10275 \
10276 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10277 } \
10278 } \
10279 else \
10280 { \
10281 /* memory */ \
10282 switch (pVCpu->iem.s.enmEffOpSize) \
10283 { \
10284 case IEMMODE_16BIT: \
10285 IEM_MC_BEGIN(0, 0); \
10286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10289 \
10290 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10291 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10292 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10293 \
10294 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10295 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10296 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10297 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
10298 \
10299 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10300 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10301 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10302 IEM_MC_END(); \
10303 break; \
10304 \
10305 case IEMMODE_32BIT: \
10306 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10310 \
10311 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10312 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10313 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10314 \
10315 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10316 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10317 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10318 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
10319 \
10320 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10321 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10322 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10323 IEM_MC_END(); \
10324 break; \
10325 \
10326 case IEMMODE_64BIT: \
10327 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10331 \
10332 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10333 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10334 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10335 \
10336 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10337 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10338 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10339 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
10340 \
10341 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10342 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10343 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10344 IEM_MC_END(); \
10345 break; \
10346 \
10347 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10348 } \
10349 } (void)0
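
/* The IEM_MC_NATIVE_IF path above lets the native recompiler emit the
   operation inline on the architectures given in a_fRegNativeArchs;
   currently only SHL passes a non-zero mask (see below), everything else
   falls back on the generic assembly helper via IEM_MC_CALL_AIMPL_3. */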
10350
10351
10352/**
10353 * @opmaps grp2_d3
10354 * @opcode /0
10355 * @opflclass rotate_count
10356 */
10357FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
10358{
10359 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10360 GRP2_BODY_Ev_CL(rol, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags), 0, 0);
10361}
10362
10363
10364/**
10365 * @opmaps grp2_d3
10366 * @opcode /1
10367 * @opflclass rotate_count
10368 */
10369FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
10370{
10371 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10372 GRP2_BODY_Ev_CL(ror, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags), 0, 0);
10373}
10374
10375
10376/**
10377 * @opmaps grp2_d3
10378 * @opcode /2
10379 * @opflclass rotate_carry_count
10380 */
10381FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
10382{
10383 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10384 GRP2_BODY_Ev_CL(rcl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags), 0, 0);
10385}
10386
10387
10388/**
10389 * @opmaps grp2_d3
10390 * @opcode /3
10391 * @opflclass rotate_carry_count
10392 */
10393FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
10394{
10395 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10396 GRP2_BODY_Ev_CL(rcr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags), 0, 0);
10397}
10398
10399
10400/**
10401 * @opmaps grp2_d3
10402 * @opcode /4
10403 * @opflclass shift_count
10404 */
10405FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
10406{
10407 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10408 GRP2_BODY_Ev_CL(shl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags), RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
10409}
10410
10411
10412/**
10413 * @opmaps grp2_d3
10414 * @opcode /5
10415 * @opflclass shift_count
10416 */
10417FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
10418{
10419 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10420 GRP2_BODY_Ev_CL(shr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags), 0, 0);
10421}
10422
10423
10424/**
10425 * @opmaps grp2_d3
10426 * @opcode /7
10427 * @opflclass shift_count
10428 */
10429FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
10430{
10431 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10432 GRP2_BODY_Ev_CL(sar, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags), 0, 0);
10433}
10434
10435#undef GRP2_BODY_Ev_CL
10436
10437/**
10438 * @opcode 0xd3
10439 */
10440FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10441{
10442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10443 switch (IEM_GET_MODRM_REG_8(bRm))
10444 {
10445 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10446 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10447 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10448 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10449 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10450 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10451 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10452 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10453 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10454 }
10455}
10456
10457
10458/**
10459 * @opcode 0xd4
10460 * @opflmodify cf,pf,af,zf,sf,of
10461 * @opflundef cf,af,of
10462 */
10463FNIEMOP_DEF(iemOp_aam_Ib)
10464{
10465/** @todo testcase: aam */
10466 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10467 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10469 IEMOP_HLP_NO_64BIT();
10470 if (!bImm)
10471 IEMOP_RAISE_DIVIDE_ERROR_RET();
10472 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10473}
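
/* For reference, AAM behaves roughly like this (invalid in 64-bit mode,
   bImm == 0 raising #DE as checked above):
        AH = AL / bImm;
        AL = AL % bImm;
   with SF, ZF and PF set according to the resulting AL. */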
10474
10475
10476/**
10477 * @opcode 0xd5
10478 * @opflmodify cf,pf,af,zf,sf,of
10479 * @opflundef cf,af,of
10480 */
10481FNIEMOP_DEF(iemOp_aad_Ib)
10482{
10483/** @todo testcase: aad? */
10484 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10485 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10487 IEMOP_HLP_NO_64BIT();
10488 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10489}
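
/* For reference, AAD behaves roughly like this (invalid in 64-bit mode):
        AL = (AL + AH * bImm) & 0xff;
        AH = 0;
   with SF, ZF and PF set according to the resulting AL. */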
10490
10491
10492/**
10493 * @opcode 0xd6
10494 */
10495FNIEMOP_DEF(iemOp_salc)
10496{
10497 IEMOP_MNEMONIC(salc, "salc");
10498 IEMOP_HLP_NO_64BIT();
10499
10500 IEM_MC_BEGIN(0, 0);
10501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10502 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10503 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10504 } IEM_MC_ELSE() {
10505 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10506 } IEM_MC_ENDIF();
10507 IEM_MC_ADVANCE_RIP_AND_FINISH();
10508 IEM_MC_END();
10509}
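
/* SALC (set AL from carry) is an undocumented instruction, and 0xd6 is an
   invalid opcode in 64-bit mode, hence the IEMOP_HLP_NO_64BIT above. */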
10510
10511
10512/**
10513 * @opcode 0xd7
10514 */
10515FNIEMOP_DEF(iemOp_xlat)
10516{
10517 IEMOP_MNEMONIC(xlat, "xlat");
10518 switch (pVCpu->iem.s.enmEffAddrMode)
10519 {
10520 case IEMMODE_16BIT:
10521 IEM_MC_BEGIN(0, 0);
10522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10523 IEM_MC_LOCAL(uint8_t, u8Tmp);
10524 IEM_MC_LOCAL(uint16_t, u16Addr);
10525 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
10526 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
10527 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10528 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530 IEM_MC_END();
10531 break;
10532
10533 case IEMMODE_32BIT:
10534 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10536 IEM_MC_LOCAL(uint8_t, u8Tmp);
10537 IEM_MC_LOCAL(uint32_t, u32Addr);
10538 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
10539 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
10540 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10541 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10542 IEM_MC_ADVANCE_RIP_AND_FINISH();
10543 IEM_MC_END();
10544 break;
10545
10546 case IEMMODE_64BIT:
10547 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10549 IEM_MC_LOCAL(uint8_t, u8Tmp);
10550 IEM_MC_LOCAL(uint64_t, u64Addr);
10551 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10552 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10553 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10554 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10555 IEM_MC_ADVANCE_RIP_AND_FINISH();
10556 IEM_MC_END();
10557 break;
10558
10559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10560 }
10561}
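
/* For reference, all three variants above implement, roughly:
        AL = read_u8(iEffSeg : rBX + (uint8_t)AL);
   with rBX truncated to the effective address size and iEffSeg defaulting
   to DS but respecting segment override prefixes. */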
10562
10563
10564/**
10565 * Common worker for FPU instructions working on ST0 and STn, and storing the
10566 * result in ST0.
10567 *
10568 * @param bRm Mod R/M byte.
10569 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10570 */
10571FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10572{
10573 IEM_MC_BEGIN(0, 0);
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10576 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10577 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10578 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10579
10580 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10581 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10582 IEM_MC_PREPARE_FPU_USAGE();
10583 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10584 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10585 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10586 } IEM_MC_ELSE() {
10587 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10588 } IEM_MC_ENDIF();
10589 IEM_MC_ADVANCE_RIP_AND_FINISH();
10590
10591 IEM_MC_END();
10592}
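
/* This is the common shape of the two-register FPU workers: when both ST0
   and STn hold values the assembly implementation is called and its result
   (value + FSW) is committed, otherwise a stack underflow is recorded and
   the masked/unmasked #MF handling is left to the underflow helper. */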
10593
10594
10595/**
10596 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10597 * flags.
10598 *
10599 * @param bRm Mod R/M byte.
10600 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10601 */
10602FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10603{
10604 IEM_MC_BEGIN(0, 0);
10605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10606 IEM_MC_LOCAL(uint16_t, u16Fsw);
10607 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10608 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10609 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10610
10611 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10612 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10613 IEM_MC_PREPARE_FPU_USAGE();
10614 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10615 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10616 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10617 } IEM_MC_ELSE() {
10618 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10619 } IEM_MC_ENDIF();
10620 IEM_MC_ADVANCE_RIP_AND_FINISH();
10621
10622 IEM_MC_END();
10623}
10624
10625
10626/**
10627 * Common worker for FPU instructions working on ST0 and STn, only affecting
10628 * flags, and popping when done.
10629 *
10630 * @param bRm Mod R/M byte.
10631 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10632 */
10633FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10634{
10635 IEM_MC_BEGIN(0, 0);
10636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10637 IEM_MC_LOCAL(uint16_t, u16Fsw);
10638 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10639 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10640 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10641
10642 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10643 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10644 IEM_MC_PREPARE_FPU_USAGE();
10645 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10646 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10647 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10648 } IEM_MC_ELSE() {
10649 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10650 } IEM_MC_ENDIF();
10651 IEM_MC_ADVANCE_RIP_AND_FINISH();
10652
10653 IEM_MC_END();
10654}
10655
10656
10657/** Opcode 0xd8 11/0. */
10658FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
10659{
10660 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
10661 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
10662}
10663
10664
10665/** Opcode 0xd8 11/1. */
10666FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
10667{
10668 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
10669 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
10670}
10671
10672
10673/** Opcode 0xd8 11/2. */
10674FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
10675{
10676 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
10677 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
10678}
10679
10680
10681/** Opcode 0xd8 11/3. */
10682FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
10683{
10684 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
10685 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
10686}
10687
10688
10689/** Opcode 0xd8 11/4. */
10690FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
10691{
10692 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
10693 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
10694}
10695
10696
10697/** Opcode 0xd8 11/5. */
10698FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
10699{
10700 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
10701 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
10702}
10703
10704
10705/** Opcode 0xd8 11/6. */
10706FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
10707{
10708 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
10709 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
10710}
10711
10712
10713/** Opcode 0xd8 11/7. */
10714FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
10715{
10716 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
10717 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
10718}
10719
10720
10721/**
10722 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10723 * the result in ST0.
10724 *
10725 * @param bRm Mod R/M byte.
10726 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10727 */
10728FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10729{
10730 IEM_MC_BEGIN(0, 0);
10731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10732 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10733 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10734 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10735 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10736 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10737
10738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10740
10741 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10742 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10743 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10744
10745 IEM_MC_PREPARE_FPU_USAGE();
10746 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10747 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10748 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10749 } IEM_MC_ELSE() {
10750 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10751 } IEM_MC_ENDIF();
10752 IEM_MC_ADVANCE_RIP_AND_FINISH();
10753
10754 IEM_MC_END();
10755}
10756
10757
10758/** Opcode 0xd8 !11/0. */
10759FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
10760{
10761 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
10762 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
10763}
10764
10765
10766/** Opcode 0xd8 !11/1. */
10767FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
10768{
10769 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
10770 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
10771}
10772
10773
10774/** Opcode 0xd8 !11/2. */
10775FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
10776{
10777 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
10778
10779 IEM_MC_BEGIN(0, 0);
10780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10781 IEM_MC_LOCAL(uint16_t, u16Fsw);
10782 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10783 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10784 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10785 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10786
10787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789
10790 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10791 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10792 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10793
10794 IEM_MC_PREPARE_FPU_USAGE();
10795 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10796 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10797 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10798 } IEM_MC_ELSE() {
10799 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10800 } IEM_MC_ENDIF();
10801 IEM_MC_ADVANCE_RIP_AND_FINISH();
10802
10803 IEM_MC_END();
10804}
10805
10806
10807/** Opcode 0xd8 !11/3. */
10808FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
10809{
10810 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
10811
10812 IEM_MC_BEGIN(0, 0);
10813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10814 IEM_MC_LOCAL(uint16_t, u16Fsw);
10815 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10816 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10817 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10818 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10819
10820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10822
10823 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10824 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10825 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10826
10827 IEM_MC_PREPARE_FPU_USAGE();
10828 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10829 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10830 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10831 } IEM_MC_ELSE() {
10832 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10833 } IEM_MC_ENDIF();
10834 IEM_MC_ADVANCE_RIP_AND_FINISH();
10835
10836 IEM_MC_END();
10837}
10838
10839
10840/** Opcode 0xd8 !11/4. */
10841FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
10842{
10843 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
10844 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
10845}
10846
10847
10848/** Opcode 0xd8 !11/5. */
10849FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
10850{
10851 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
10852 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
10853}
10854
10855
10856/** Opcode 0xd8 !11/6. */
10857FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
10858{
10859 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
10860 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
10861}
10862
10863
10864/** Opcode 0xd8 !11/7. */
10865FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
10866{
10867 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
10868 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
10869}
10870
10871
10872/**
10873 * @opcode 0xd8
10874 */
10875FNIEMOP_DEF(iemOp_EscF0)
10876{
10877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10878 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10879
10880 if (IEM_IS_MODRM_REG_MODE(bRm))
10881 {
10882 switch (IEM_GET_MODRM_REG_8(bRm))
10883 {
10884 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10885 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10886 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10887 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10888 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10889 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10890 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10891 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10893 }
10894 }
10895 else
10896 {
10897 switch (IEM_GET_MODRM_REG_8(bRm))
10898 {
10899 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10900 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10901 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10902 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10903 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10904 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10905 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10906 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10908 }
10909 }
10910}
10911
10912
10913/** Opcode 0xd9 /0 mem32real
10914 * @sa iemOp_fld_m64r */
10915FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
10916{
10917 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
10918
10919 IEM_MC_BEGIN(0, 0);
10920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10921 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10922 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
10923 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10924 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
10925
10926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10928
10929 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10930 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10931 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10932 IEM_MC_PREPARE_FPU_USAGE();
10933 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10934 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
10935 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10936 } IEM_MC_ELSE() {
10937 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10938 } IEM_MC_ENDIF();
10939 IEM_MC_ADVANCE_RIP_AND_FINISH();
10940
10941 IEM_MC_END();
10942}
10943
10944
10945/** Opcode 0xd9 !11/2 mem32real */
10946FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
10947{
10948 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
10949 IEM_MC_BEGIN(0, 0);
10950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10952
10953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10954 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10955 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10956 IEM_MC_PREPARE_FPU_USAGE();
10957
10958 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10959 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10960 IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10961
10962 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10963 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10964 IEM_MC_LOCAL(uint16_t, u16Fsw);
10965 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10966 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
10967 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10968 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10969 } IEM_MC_ELSE() {
10970 IEM_MC_IF_FCW_IM() {
10971 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
10972 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10973 } IEM_MC_ELSE() {
10974 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10975 } IEM_MC_ENDIF();
10976 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10977 } IEM_MC_ENDIF();
10978 IEM_MC_ADVANCE_RIP_AND_FINISH();
10979
10980 IEM_MC_END();
10981}
10982
10983
10984/** Opcode 0xd9 !11/3 */
10985FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
10986{
10987 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
10988 IEM_MC_BEGIN(0, 0);
10989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10991
10992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10993 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10994 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10995 IEM_MC_PREPARE_FPU_USAGE();
10996
10997 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10998 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10999 IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11000
11001 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11002 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11003 IEM_MC_LOCAL(uint16_t, u16Fsw);
11004 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11005 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
11006 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11007 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11008 } IEM_MC_ELSE() {
11009 IEM_MC_IF_FCW_IM() {
11010 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
11011 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11012 } IEM_MC_ELSE() {
11013 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11014 } IEM_MC_ENDIF();
11015 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11016 } IEM_MC_ENDIF();
11017 IEM_MC_ADVANCE_RIP_AND_FINISH();
11018
11019 IEM_MC_END();
11020}
11021
11022
11023/** Opcode 0xd9 !11/4 */
11024FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
11025{
11026 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
11027 IEM_MC_BEGIN(0, 0);
11028 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
11029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11030
11031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11032 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11034
11035 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
11036 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
11037 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
11038 iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
11039 IEM_MC_END();
11040}
11041
11042
11043/** Opcode 0xd9 !11/5 */
11044FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11045{
11046 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
11047 IEM_MC_BEGIN(0, 0);
11048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11050
11051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11052 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11053 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11054
11055 IEM_MC_ARG(uint16_t, u16Fcw, 0);
11056 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11057 
11058 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
11059 iemCImpl_fldcw, u16Fcw);
11060 IEM_MC_END();
11061}
11062
11063
11064/** Opcode 0xd9 !11/6 */
11065FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
11066{
11067 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
11068 IEM_MC_BEGIN(0, 0);
11069 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
11070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11071
11072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11073 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11074 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
11075
11076 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
11077 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
11078 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
11079 iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
11080 IEM_MC_END();
11081}
11082
11083
11084/** Opcode 0xd9 !11/7 */
11085FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
11086{
11087 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
11088 IEM_MC_BEGIN(0, 0);
11089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11090 IEM_MC_LOCAL(uint16_t, u16Fcw);
11091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11093 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11094 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
11095 IEM_MC_FETCH_FCW(u16Fcw);
11096 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
11097 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
11098 IEM_MC_END();
11099}
11100
11101
11102/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
11103FNIEMOP_DEF(iemOp_fnop)
11104{
11105 IEMOP_MNEMONIC(fnop, "fnop");
11106 IEM_MC_BEGIN(0, 0);
11107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11110 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11111 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
11112 * Intel optimizations. Investigate. */
11113 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11114 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
11115 IEM_MC_END();
11116}
11117
11118
11119/** Opcode 0xd9 11/0 stN */
11120FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
11121{
11122 IEMOP_MNEMONIC(fld_stN, "fld stN");
11123 /** @todo Testcase: Check if this raises \#MF? Intel does not mention it,
11124 * but AMD indicates that it does. */
11125 IEM_MC_BEGIN(0, 0);
11126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11127 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
11128 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11129 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11130 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11131
11132 IEM_MC_PREPARE_FPU_USAGE();
11133 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
11134 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
11135 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
11136 } IEM_MC_ELSE() {
11137 IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
11138 } IEM_MC_ENDIF();
11139
11140 IEM_MC_ADVANCE_RIP_AND_FINISH();
11141 IEM_MC_END();
11142}
11143
11144
11145/** Opcode 0xd9 11/3 stN */
11146FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
11147{
11148 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
11149 /** @todo Testcase: Check if this raises \#MF? Intel does not mention it,
11150 * but AMD indicates that it does. */
11151 IEM_MC_BEGIN(0, 0);
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
11154 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
11155 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11156 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
11157 IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
11158 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11159 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11160
11161 IEM_MC_PREPARE_FPU_USAGE();
11162 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
11163 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
11164 IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
11165 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11166 } IEM_MC_ELSE() {
11167 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
11168 } IEM_MC_ENDIF();
11169
11170 IEM_MC_ADVANCE_RIP_AND_FINISH();
11171 IEM_MC_END();
11172}
11173
11174
11175/** Opcode 0xd9 11/4, 0xdd 11/2. */
11176FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
11177{
11178 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
11179
11180 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
11181 uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
11182 if (!iDstReg)
11183 {
11184 IEM_MC_BEGIN(0, 0);
11185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11186 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
11187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11189
11190 IEM_MC_PREPARE_FPU_USAGE();
11191 IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
11192 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11193 } IEM_MC_ELSE() {
11194 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
11195 } IEM_MC_ENDIF();
11196
11197 IEM_MC_ADVANCE_RIP_AND_FINISH();
11198 IEM_MC_END();
11199 }
11200 else
11201 {
11202 IEM_MC_BEGIN(0, 0);
11203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11204 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
11205 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11206 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11207 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11208
11209 IEM_MC_PREPARE_FPU_USAGE();
11210 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11211 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
11212 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
11213 } IEM_MC_ELSE() {
11214 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
11215 } IEM_MC_ENDIF();
11216
11217 IEM_MC_ADVANCE_RIP_AND_FINISH();
11218 IEM_MC_END();
11219 }
11220}
11221
11222
11223/**
11224 * Common worker for FPU instructions working on ST0, replacing it with the
11225 * result, i.e. unary operators.
11226 *
11227 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11228 */
11229FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
11230{
11231 IEM_MC_BEGIN(0, 0);
11232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11233 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11234 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11235 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11236
11237 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11238 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11239 IEM_MC_PREPARE_FPU_USAGE();
11240 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11241 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
11242 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11243 } IEM_MC_ELSE() {
11244 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11245 } IEM_MC_ENDIF();
11246 IEM_MC_ADVANCE_RIP_AND_FINISH();
11247
11248 IEM_MC_END();
11249}
11250
11251
11252/** Opcode 0xd9 0xe0. */
11253FNIEMOP_DEF(iemOp_fchs)
11254{
11255 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
11256 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
11257}
11258
11259
11260/** Opcode 0xd9 0xe1. */
11261FNIEMOP_DEF(iemOp_fabs)
11262{
11263 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
11264 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
11265}
11266
11267
11268/** Opcode 0xd9 0xe4. */
11269FNIEMOP_DEF(iemOp_ftst)
11270{
11271 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
11272 IEM_MC_BEGIN(0, 0);
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11274 IEM_MC_LOCAL(uint16_t, u16Fsw);
11275 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11276 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11277
11278 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11279 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11280 IEM_MC_PREPARE_FPU_USAGE();
11281 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11282 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
11283 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11284 } IEM_MC_ELSE() {
11285 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
11286 } IEM_MC_ENDIF();
11287 IEM_MC_ADVANCE_RIP_AND_FINISH();
11288
11289 IEM_MC_END();
11290}
11291
11292
11293/** Opcode 0xd9 0xe5. */
11294FNIEMOP_DEF(iemOp_fxam)
11295{
11296 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
11297 IEM_MC_BEGIN(0, 0);
11298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11299 IEM_MC_LOCAL(uint16_t, u16Fsw);
11300 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11301 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11302
11303 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11304 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11305 IEM_MC_PREPARE_FPU_USAGE();
11306 IEM_MC_REF_FPUREG(pr80Value, 0);
11307 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
11308 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11309 IEM_MC_ADVANCE_RIP_AND_FINISH();
11310
11311 IEM_MC_END();
11312}
11313
11314
11315/**
11316 * Common worker for FPU instructions pushing a constant onto the FPU stack.
11317 *
11318 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11319 */
11320FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
11321{
11322 IEM_MC_BEGIN(0, 0);
11323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11324 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11325 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11326
11327 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11328 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11329 IEM_MC_PREPARE_FPU_USAGE();
11330 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11331 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
11332 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
11333 } IEM_MC_ELSE() {
11334 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
11335 } IEM_MC_ENDIF();
11336 IEM_MC_ADVANCE_RIP_AND_FINISH();
11337
11338 IEM_MC_END();
11339}
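
/* The constants pushed by the loaders below: fld1 = +1.0, fldl2t = log2(10),
   fldl2e = log2(e), fldpi = pi, fldlg2 = log10(2), fldln2 = ln(2) and
   fldz = +0.0. */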
11340
11341
11342/** Opcode 0xd9 0xe8. */
11343FNIEMOP_DEF(iemOp_fld1)
11344{
11345 IEMOP_MNEMONIC(fld1, "fld1");
11346 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
11347}
11348
11349
11350/** Opcode 0xd9 0xe9. */
11351FNIEMOP_DEF(iemOp_fldl2t)
11352{
11353 IEMOP_MNEMONIC(fldl2t, "fldl2t");
11354 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
11355}
11356
11357
11358/** Opcode 0xd9 0xea. */
11359FNIEMOP_DEF(iemOp_fldl2e)
11360{
11361 IEMOP_MNEMONIC(fldl2e, "fldl2e");
11362 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
11363}
11364
11365/** Opcode 0xd9 0xeb. */
11366FNIEMOP_DEF(iemOp_fldpi)
11367{
11368 IEMOP_MNEMONIC(fldpi, "fldpi");
11369 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
11370}
11371
11372
11373/** Opcode 0xd9 0xec. */
11374FNIEMOP_DEF(iemOp_fldlg2)
11375{
11376 IEMOP_MNEMONIC(fldlg2, "fldlg2");
11377 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
11378}
11379
11380/** Opcode 0xd9 0xed. */
11381FNIEMOP_DEF(iemOp_fldln2)
11382{
11383 IEMOP_MNEMONIC(fldln2, "fldln2");
11384 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
11385}
11386
11387
11388/** Opcode 0xd9 0xee. */
11389FNIEMOP_DEF(iemOp_fldz)
11390{
11391 IEMOP_MNEMONIC(fldz, "fldz");
11392 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
11393}
11394
11395
11396/** Opcode 0xd9 0xf0.
11397 *
11398 * The f2xm1 instruction currently works on values -1.0 thru +1.0 (the range
11399 * on the 287 & 8087 was 0.0 thru +0.5 according to docs). In addition it
11400 * does appear to produce proper results for +Inf and -Inf.
11401 *
11402 * This is probably useful in the implementation of pow() and similar.
11403 */
11404FNIEMOP_DEF(iemOp_f2xm1)
11405{
11406 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
11407 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
11408}
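
/* Together with fyl2x and fscale this gives the classic x87 pow() sequence,
   roughly (y in st1, x in st0):
        fyl2x                   ; st0 = y * log2(x) = z
        ...split z into integer part I and fraction F...
        f2xm1                   ; st0 = 2**F - 1
        fld1
        faddp st1, st0          ; st0 = 2**F
        fscale                  ; st0 = 2**F * 2**I = x**y (I in st1)
 */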
11409
11410
11411/**
11412 * Common worker for FPU instructions working on STn and ST0, storing the result
11413 * in STn, and popping the stack unless IE, DE or ZE was raised.
11414 *
11415 * @param bRm Mod R/M byte.
11416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11417 */
11418FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11419{
11420 IEM_MC_BEGIN(0, 0);
11421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11422 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11423 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11424 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11426
11427 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11428 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11429
11430 IEM_MC_PREPARE_FPU_USAGE();
11431 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11432 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11433 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11434 } IEM_MC_ELSE() {
11435 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11436 } IEM_MC_ENDIF();
11437 IEM_MC_ADVANCE_RIP_AND_FINISH();
11438
11439 IEM_MC_END();
11440}
11441
11442
11443/** Opcode 0xd9 0xf1. */
11444FNIEMOP_DEF(iemOp_fyl2x)
11445{
11446 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
11447 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
11448}
11449
11450
11451/**
11452 * Common worker for FPU instructions working on ST0 and having two outputs, one
11453 * replacing ST0 and one pushed onto the stack.
11454 *
11455 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11456 */
11457FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
11458{
11459 IEM_MC_BEGIN(0, 0);
11460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11461 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
11462 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
11463 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11464
11465 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11466 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11467 IEM_MC_PREPARE_FPU_USAGE();
11468 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11469 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
11470 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
11471 } IEM_MC_ELSE() {
11472 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
11473 } IEM_MC_ENDIF();
11474 IEM_MC_ADVANCE_RIP_AND_FINISH();
11475
11476 IEM_MC_END();
11477}
11478
11479
11480/** Opcode 0xd9 0xf2. */
11481FNIEMOP_DEF(iemOp_fptan)
11482{
11483 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
11484 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
11485}
11486
11487
11488/** Opcode 0xd9 0xf3. */
11489FNIEMOP_DEF(iemOp_fpatan)
11490{
11491 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
11492 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
11493}
11494
11495
11496/** Opcode 0xd9 0xf4. */
11497FNIEMOP_DEF(iemOp_fxtract)
11498{
11499 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
11500 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
11501}
11502
11503
11504/** Opcode 0xd9 0xf5. */
11505FNIEMOP_DEF(iemOp_fprem1)
11506{
11507 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
11508 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
11509}
11510
11511
11512/** Opcode 0xd9 0xf6. */
11513FNIEMOP_DEF(iemOp_fdecstp)
11514{
11515 IEMOP_MNEMONIC(fdecstp, "fdecstp");
11516 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
11517 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
11518 * FINCSTP and FDECSTP. */
11519 IEM_MC_BEGIN(0, 0);
11520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11521
11522 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11523 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11524
11525 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11526 IEM_MC_FPU_STACK_DEC_TOP();
11527 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
11528
11529 IEM_MC_ADVANCE_RIP_AND_FINISH();
11530 IEM_MC_END();
11531}
11532
11533
11534/** Opcode 0xd9 0xf7. */
11535FNIEMOP_DEF(iemOp_fincstp)
11536{
11537 IEMOP_MNEMONIC(fincstp, "fincstp");
11538 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
11539 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
11540 * FINCSTP and FDECSTP. */
11541 IEM_MC_BEGIN(0, 0);
11542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11543
11544 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11545 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11546
11547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11548 IEM_MC_FPU_STACK_INC_TOP();
11549 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
11550
11551 IEM_MC_ADVANCE_RIP_AND_FINISH();
11552 IEM_MC_END();
11553}
11554
11555
11556/** Opcode 0xd9 0xf8. */
11557FNIEMOP_DEF(iemOp_fprem)
11558{
11559 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
11560 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
11561}
11562
11563
11564/** Opcode 0xd9 0xf9. */
11565FNIEMOP_DEF(iemOp_fyl2xp1)
11566{
11567 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
11568 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
11569}
11570
11571
11572/** Opcode 0xd9 0xfa. */
11573FNIEMOP_DEF(iemOp_fsqrt)
11574{
11575 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
11576 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
11577}
11578
11579
11580/** Opcode 0xd9 0xfb. */
11581FNIEMOP_DEF(iemOp_fsincos)
11582{
11583 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
11584 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
11585}
11586
11587
11588/** Opcode 0xd9 0xfc. */
11589FNIEMOP_DEF(iemOp_frndint)
11590{
11591 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
11592 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
11593}
11594
11595
11596/** Opcode 0xd9 0xfd. */
11597FNIEMOP_DEF(iemOp_fscale)
11598{
11599 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
11600 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
11601}
11602
11603
11604/** Opcode 0xd9 0xfe. */
11605FNIEMOP_DEF(iemOp_fsin)
11606{
11607 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
11608 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
11609}
11610
11611
11612/** Opcode 0xd9 0xff. */
11613FNIEMOP_DEF(iemOp_fcos)
11614{
11615 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
11616 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
11617}
11618
11619
11620/** Used by iemOp_EscF1. */
11621IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
11622{
11623 /* 0xe0 */ iemOp_fchs,
11624 /* 0xe1 */ iemOp_fabs,
11625 /* 0xe2 */ iemOp_Invalid,
11626 /* 0xe3 */ iemOp_Invalid,
11627 /* 0xe4 */ iemOp_ftst,
11628 /* 0xe5 */ iemOp_fxam,
11629 /* 0xe6 */ iemOp_Invalid,
11630 /* 0xe7 */ iemOp_Invalid,
11631 /* 0xe8 */ iemOp_fld1,
11632 /* 0xe9 */ iemOp_fldl2t,
11633 /* 0xea */ iemOp_fldl2e,
11634 /* 0xeb */ iemOp_fldpi,
11635 /* 0xec */ iemOp_fldlg2,
11636 /* 0xed */ iemOp_fldln2,
11637 /* 0xee */ iemOp_fldz,
11638 /* 0xef */ iemOp_Invalid,
11639 /* 0xf0 */ iemOp_f2xm1,
11640 /* 0xf1 */ iemOp_fyl2x,
11641 /* 0xf2 */ iemOp_fptan,
11642 /* 0xf3 */ iemOp_fpatan,
11643 /* 0xf4 */ iemOp_fxtract,
11644 /* 0xf5 */ iemOp_fprem1,
11645 /* 0xf6 */ iemOp_fdecstp,
11646 /* 0xf7 */ iemOp_fincstp,
11647 /* 0xf8 */ iemOp_fprem,
11648 /* 0xf9 */ iemOp_fyl2xp1,
11649 /* 0xfa */ iemOp_fsqrt,
11650 /* 0xfb */ iemOp_fsincos,
11651 /* 0xfc */ iemOp_frndint,
11652 /* 0xfd */ iemOp_fscale,
11653 /* 0xfe */ iemOp_fsin,
11654 /* 0xff */ iemOp_fcos
11655};
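
/* The table is indexed with the raw ModR/M byte minus 0xe0, so lookup is a
   single subtraction and invalid encodings map to iemOp_Invalid instead of
   being filtered by the dispatcher.  A small sanity sketch:

       Assert(g_apfnEscF1_E0toFF[0xf2 - 0xe0] == iemOp_fptan);  // 0xd9 0xf2
*/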
11656
11657
11658/**
11659 * @opcode 0xd9
11660 */
11661FNIEMOP_DEF(iemOp_EscF1)
11662{
11663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11664 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11665
11666 if (IEM_IS_MODRM_REG_MODE(bRm))
11667 {
11668 switch (IEM_GET_MODRM_REG_8(bRm))
11669 {
11670 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11671 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11672 case 2:
11673 if (bRm == 0xd0)
11674 return FNIEMOP_CALL(iemOp_fnop);
11675 IEMOP_RAISE_INVALID_OPCODE_RET();
11676 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11677 case 4:
11678 case 5:
11679 case 6:
11680 case 7:
11681 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11682 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11684 }
11685 }
11686 else
11687 {
11688 switch (IEM_GET_MODRM_REG_8(bRm))
11689 {
11690 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11691 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11692 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11693 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11694 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11695 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11696 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11697 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11699 }
11700 }
11701}
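
/* The FOP value stashed in uFpuOpcode above follows the hardware layout: the
   low 8 bits hold the ModR/M byte and bits 8-10 hold the low three bits of
   the escape opcode.  Since RT_MAKE_U16(Lo, Hi) is Lo | (Hi << 8):

       RT_MAKE_U16(0xf2, 0xd9 & 0x7) == 0x01f2   // FPTAN's 11-bit FOP

   The same pattern repeats in the other escape byte dispatchers below. */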
11702
11703
11704/** Opcode 0xda 11/0. */
11705FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
11706{
11707 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
11708 IEM_MC_BEGIN(0, 0);
11709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11710 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11711
11712 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11714
11715 IEM_MC_PREPARE_FPU_USAGE();
11716 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
11718 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11719 } IEM_MC_ENDIF();
11720 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11721 } IEM_MC_ELSE() {
11722 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11723 } IEM_MC_ENDIF();
11724 IEM_MC_ADVANCE_RIP_AND_FINISH();
11725
11726 IEM_MC_END();
11727}
11728
11729
11730/** Opcode 0xda 11/1. */
11731FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
11732{
11733 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
11734 IEM_MC_BEGIN(0, 0);
11735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11736 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11737
11738 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11740
11741 IEM_MC_PREPARE_FPU_USAGE();
11742 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
11744 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11745 } IEM_MC_ENDIF();
11746 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11747 } IEM_MC_ELSE() {
11748 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11749 } IEM_MC_ENDIF();
11750 IEM_MC_ADVANCE_RIP_AND_FINISH();
11751
11752 IEM_MC_END();
11753}
11754
11755
11756/** Opcode 0xda 11/2. */
11757FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
11758{
11759 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
11760 IEM_MC_BEGIN(0, 0);
11761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11762 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11763
11764 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11765 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11766
11767 IEM_MC_PREPARE_FPU_USAGE();
11768 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11769 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
11770 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11771 } IEM_MC_ENDIF();
11772 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11773 } IEM_MC_ELSE() {
11774 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11775 } IEM_MC_ENDIF();
11776 IEM_MC_ADVANCE_RIP_AND_FINISH();
11777
11778 IEM_MC_END();
11779}
11780
11781
11782/** Opcode 0xda 11/3. */
11783FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
11784{
11785 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
11786 IEM_MC_BEGIN(0, 0);
11787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11788 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11789
11790 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11791 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11792
11793 IEM_MC_PREPARE_FPU_USAGE();
11794 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
11796 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11797 } IEM_MC_ENDIF();
11798 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11799 } IEM_MC_ELSE() {
11800 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11801 } IEM_MC_ENDIF();
11802 IEM_MC_ADVANCE_RIP_AND_FINISH();
11803
11804 IEM_MC_END();
11805}
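
/* Condition summary for the FCMOV forms above and their 0xdb inverses further
   down; each maps onto an EFLAGS test, mirroring the integer CMOVcc family:

       fcmovb    CF=1            fcmovnb    CF=0
       fcmove    ZF=1            fcmovne    ZF=0
       fcmovbe   CF=1 or ZF=1    fcmovnbe   CF=0 and ZF=0
       fcmovu    PF=1            fcmovnu    PF=0

   An empty ST0 or ST(N) takes the stack-underflow path regardless of whether
   the condition holds, as the EFLAGS test sits inside the non-empty branch. */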
11806
11807
11808/**
11809 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11810 * flags, and popping twice when done.
11811 *
11812 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11813 */
11814FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11815{
11816 IEM_MC_BEGIN(0, 0);
11817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11818 IEM_MC_LOCAL(uint16_t, u16Fsw);
11819 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11820 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11821 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11822
11823 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11824 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11825
11826 IEM_MC_PREPARE_FPU_USAGE();
11827 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11828 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11829 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11830 } IEM_MC_ELSE() {
11831 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11832 } IEM_MC_ENDIF();
11833 IEM_MC_ADVANCE_RIP_AND_FINISH();
11834
11835 IEM_MC_END();
11836}
11837
11838
11839/** Opcode 0xda 0xe9. */
11840FNIEMOP_DEF(iemOp_fucompp)
11841{
11842 IEMOP_MNEMONIC(fucompp, "fucompp");
11843 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
11844}
11845
11846
11847/**
11848 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11849 * the result in ST0.
11850 *
11851 * @param bRm Mod R/M byte.
11852 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11853 */
11854FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11855{
11856 IEM_MC_BEGIN(0, 0);
11857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11858 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11859 IEM_MC_LOCAL(int32_t, i32Val2);
11860 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11861 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11862 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11863
11864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11866
11867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11869 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11870
11871 IEM_MC_PREPARE_FPU_USAGE();
11872 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11873 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11874 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11875 } IEM_MC_ELSE() {
11876 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11877 } IEM_MC_ENDIF();
11878 IEM_MC_ADVANCE_RIP_AND_FINISH();
11879
11880 IEM_MC_END();
11881}
11882
11883
11884/** Opcode 0xda !11/0. */
11885FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
11886{
11887 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
11888 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
11889}
11890
11891
11892/** Opcode 0xda !11/1. */
11893FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
11894{
11895 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
11896 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
11897}
11898
11899
11900/** Opcode 0xda !11/2. */
11901FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
11902{
11903 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
11904
11905 IEM_MC_BEGIN(0, 0);
11906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11907 IEM_MC_LOCAL(uint16_t, u16Fsw);
11908 IEM_MC_LOCAL(int32_t, i32Val2);
11909 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11910 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11911 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11912
11913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11915
11916 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11917 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11918 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11919
11920 IEM_MC_PREPARE_FPU_USAGE();
11921 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11922 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
11923 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11924 } IEM_MC_ELSE() {
11925 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11926 } IEM_MC_ENDIF();
11927 IEM_MC_ADVANCE_RIP_AND_FINISH();
11928
11929 IEM_MC_END();
11930}
11931
11932
11933/** Opcode 0xda !11/3. */
11934FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
11935{
11936 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
11937
11938 IEM_MC_BEGIN(0, 0);
11939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11940 IEM_MC_LOCAL(uint16_t, u16Fsw);
11941 IEM_MC_LOCAL(int32_t, i32Val2);
11942 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11943 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11944 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11945
11946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11948
11949 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11950 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11951 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11952
11953 IEM_MC_PREPARE_FPU_USAGE();
11954 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11955 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
11956 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11957 } IEM_MC_ELSE() {
11958 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11959 } IEM_MC_ENDIF();
11960 IEM_MC_ADVANCE_RIP_AND_FINISH();
11961
11962 IEM_MC_END();
11963}
11964
11965
11966/** Opcode 0xda !11/4. */
11967FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
11968{
11969 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
11970 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
11971}
11972
11973
11974/** Opcode 0xda !11/5. */
11975FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
11976{
11977 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
11978 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
11979}
11980
11981
11982/** Opcode 0xda !11/6. */
11983FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
11984{
11985 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
11986 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
11987}
11988
11989
11990/** Opcode 0xda !11/7. */
11991FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
11992{
11993 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
11994 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
11995}
11996
11997
11998/**
11999 * @opcode 0xda
12000 */
12001FNIEMOP_DEF(iemOp_EscF2)
12002{
12003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12004 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
12005 if (IEM_IS_MODRM_REG_MODE(bRm))
12006 {
12007 switch (IEM_GET_MODRM_REG_8(bRm))
12008 {
12009 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
12010 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
12011 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
12012 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
12013 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
12014 case 5:
12015 if (bRm == 0xe9)
12016 return FNIEMOP_CALL(iemOp_fucompp);
12017 IEMOP_RAISE_INVALID_OPCODE_RET();
12018 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12019 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12021 }
12022 }
12023 else
12024 {
12025 switch (IEM_GET_MODRM_REG_8(bRm))
12026 {
12027 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
12028 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
12029 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
12030 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
12031 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
12032 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
12033 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
12034 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
12035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12036 }
12037 }
12038}
12039
12040
12041/** Opcode 0xdb !11/0. */
12042FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
12043{
12044 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
12045
12046 IEM_MC_BEGIN(0, 0);
12047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12048 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12049 IEM_MC_LOCAL(int32_t, i32Val);
12050 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12051 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
12052
12053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12055
12056 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12057 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12058 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12059
12060 IEM_MC_PREPARE_FPU_USAGE();
12061 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12062 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
12063 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12064 } IEM_MC_ELSE() {
12065 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12066 } IEM_MC_ENDIF();
12067 IEM_MC_ADVANCE_RIP_AND_FINISH();
12068
12069 IEM_MC_END();
12070}
12071
12072
12073/** Opcode 0xdb !11/1. */
12074FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
12075{
12076 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
12077 IEM_MC_BEGIN(0, 0);
12078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12080
12081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12082 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12083 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12084 IEM_MC_PREPARE_FPU_USAGE();
12085
12086 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12087 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12088 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12089
12090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12091 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12092 IEM_MC_LOCAL(uint16_t, u16Fsw);
12093 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12094 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12095 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12096 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12097 } IEM_MC_ELSE() {
12098 IEM_MC_IF_FCW_IM() {
12099 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12100 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12101 } IEM_MC_ELSE() {
12102 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12103 } IEM_MC_ENDIF();
12104 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12105 } IEM_MC_ENDIF();
12106 IEM_MC_ADVANCE_RIP_AND_FINISH();
12107
12108 IEM_MC_END();
12109}
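
/* The masked-underflow path above is shared by all the integer store forms
   (fisttp/fist/fistp): with the source register empty and FCW.IM set, the
   "integer indefinite" value is stored instead of a result, while with IM
   clear the mapping is rolled back and memory is left untouched.  For the
   m32i forms that constant is INT32_MIN:

       Assert((uint32_t)INT32_MIN == UINT32_C(0x80000000));

   with INT64_MIN playing the same role for the m64i form further down. */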
12110
12111
12112/** Opcode 0xdb !11/2. */
12113FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
12114{
12115 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
12116 IEM_MC_BEGIN(0, 0);
12117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12119
12120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12121 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12122 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12123 IEM_MC_PREPARE_FPU_USAGE();
12124
12125 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12126 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12127 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12128
12129 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12130 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12131 IEM_MC_LOCAL(uint16_t, u16Fsw);
12132 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12133 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12134 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12135 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12136 } IEM_MC_ELSE() {
12137 IEM_MC_IF_FCW_IM() {
12138 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12139 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12140 } IEM_MC_ELSE() {
12141 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12142 } IEM_MC_ENDIF();
12143 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12144 } IEM_MC_ENDIF();
12145 IEM_MC_ADVANCE_RIP_AND_FINISH();
12146
12147 IEM_MC_END();
12148}
12149
12150
12151/** Opcode 0xdb !11/3. */
12152FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
12153{
12154 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
12155 IEM_MC_BEGIN(0, 0);
12156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12158
12159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12160 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12161 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12162 IEM_MC_PREPARE_FPU_USAGE();
12163
12164 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12165 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12166 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12167
12168 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12169 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12170 IEM_MC_LOCAL(uint16_t, u16Fsw);
12171 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12172 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12173 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12174 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12175 } IEM_MC_ELSE() {
12176 IEM_MC_IF_FCW_IM() {
12177 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12178 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12179 } IEM_MC_ELSE() {
12180 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12181 } IEM_MC_ENDIF();
12182 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12183 } IEM_MC_ENDIF();
12184 IEM_MC_ADVANCE_RIP_AND_FINISH();
12185
12186 IEM_MC_END();
12187}
12188
12189
12190/** Opcode 0xdb !11/5. */
12191FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
12192{
12193 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
12194
12195 IEM_MC_BEGIN(0, 0);
12196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12197 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12198 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
12199 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12200 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
12201
12202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12204
12205 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12206 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12207 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12208
12209 IEM_MC_PREPARE_FPU_USAGE();
12210 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12211 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
12212 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12213 } IEM_MC_ELSE() {
12214 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12215 } IEM_MC_ENDIF();
12216 IEM_MC_ADVANCE_RIP_AND_FINISH();
12217
12218 IEM_MC_END();
12219}
12220
12221
12222/** Opcode 0xdb !11/7. */
12223FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
12224{
12225 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
12226 IEM_MC_BEGIN(0, 0);
12227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12229
12230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12231 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12232 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12233 IEM_MC_PREPARE_FPU_USAGE();
12234
12235 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12236 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
12237 IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12238
12239 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12240 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12241 IEM_MC_LOCAL(uint16_t, u16Fsw);
12242 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12243 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
12244 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12245 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12246 } IEM_MC_ELSE() {
12247 IEM_MC_IF_FCW_IM() {
12248 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
12249 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12250 } IEM_MC_ELSE() {
12251 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12252 } IEM_MC_ENDIF();
12253 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12254 } IEM_MC_ENDIF();
12255 IEM_MC_ADVANCE_RIP_AND_FINISH();
12256
12257 IEM_MC_END();
12258}
12259
12260
12261/** Opcode 0xdb 11/0. */
12262FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
12263{
12264 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
12265 IEM_MC_BEGIN(0, 0);
12266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12267 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12268
12269 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12270 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12271
12272 IEM_MC_PREPARE_FPU_USAGE();
12273 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12274 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
12275 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12276 } IEM_MC_ENDIF();
12277 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12278 } IEM_MC_ELSE() {
12279 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12280 } IEM_MC_ENDIF();
12281 IEM_MC_ADVANCE_RIP_AND_FINISH();
12282
12283 IEM_MC_END();
12284}
12285
12286
12287/** Opcode 0xdb 11/1. */
12288FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
12289{
12290 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
12291 IEM_MC_BEGIN(0, 0);
12292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12293 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12294
12295 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12296 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12297
12298 IEM_MC_PREPARE_FPU_USAGE();
12299 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12300 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12301 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12302 } IEM_MC_ENDIF();
12303 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12304 } IEM_MC_ELSE() {
12305 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12306 } IEM_MC_ENDIF();
12307 IEM_MC_ADVANCE_RIP_AND_FINISH();
12308
12309 IEM_MC_END();
12310}
12311
12312
12313/** Opcode 0xdb 11/2. */
12314FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
12315{
12316 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
12317 IEM_MC_BEGIN(0, 0);
12318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12319 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12320
12321 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12322 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12323
12324 IEM_MC_PREPARE_FPU_USAGE();
12325 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12326 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
12327 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12328 } IEM_MC_ENDIF();
12329 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12330 } IEM_MC_ELSE() {
12331 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12332 } IEM_MC_ENDIF();
12333 IEM_MC_ADVANCE_RIP_AND_FINISH();
12334
12335 IEM_MC_END();
12336}
12337
12338
12339/** Opcode 0xdb 11/3. */
12340FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
12341{
12342 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
12343 IEM_MC_BEGIN(0, 0);
12344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12345 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12346
12347 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12348 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12349
12350 IEM_MC_PREPARE_FPU_USAGE();
12351 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12352 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
12353 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12354 } IEM_MC_ENDIF();
12355 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12356 } IEM_MC_ELSE() {
12357 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12358 } IEM_MC_ENDIF();
12359 IEM_MC_ADVANCE_RIP_AND_FINISH();
12360
12361 IEM_MC_END();
12362}
12363
12364
12365/** Opcode 0xdb 0xe0. */
12366FNIEMOP_DEF(iemOp_fneni)
12367{
12368 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
12369 IEM_MC_BEGIN(0, 0);
12370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12371 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12372 IEM_MC_ADVANCE_RIP_AND_FINISH();
12373 IEM_MC_END();
12374}
12375
12376
12377/** Opcode 0xdb 0xe1. */
12378FNIEMOP_DEF(iemOp_fndisi)
12379{
12380 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
12381 IEM_MC_BEGIN(0, 0);
12382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12383 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12384 IEM_MC_ADVANCE_RIP_AND_FINISH();
12385 IEM_MC_END();
12386}
12387
12388
12389/** Opcode 0xdb 0xe2. */
12390FNIEMOP_DEF(iemOp_fnclex)
12391{
12392 IEMOP_MNEMONIC(fnclex, "fnclex");
12393 IEM_MC_BEGIN(0, 0);
12394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12395 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12396 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12397 IEM_MC_CLEAR_FSW_EX();
12398 IEM_MC_ADVANCE_RIP_AND_FINISH();
12399 IEM_MC_END();
12400}
12401
12402
12403/** Opcode 0xdb 0xe3. */
12404FNIEMOP_DEF(iemOp_fninit)
12405{
12406 IEMOP_MNEMONIC(fninit, "fninit");
12407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12408 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12409 iemCImpl_finit, false /*fCheckXcpts*/);
12410}
12411
12412
12413/** Opcode 0xdb 0xe4. */
12414FNIEMOP_DEF(iemOp_fnsetpm)
12415{
12416 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
12417 IEM_MC_BEGIN(0, 0);
12418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12420 IEM_MC_ADVANCE_RIP_AND_FINISH();
12421 IEM_MC_END();
12422}
12423
12424
12425/** Opcode 0xdb 0xe5. */
12426FNIEMOP_DEF(iemOp_frstpm)
12427{
12428 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
12429#if 0 /* #UDs on newer CPUs */
12430 IEM_MC_BEGIN(0, 0);
12431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12432 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12433 IEM_MC_ADVANCE_RIP_AND_FINISH();
12434 IEM_MC_END();
12435 return VINF_SUCCESS;
12436#else
12437 IEMOP_RAISE_INVALID_OPCODE_RET();
12438#endif
12439}
12440
12441
12442/** Opcode 0xdb 11/5. */
12443FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
12444{
12445 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
12446 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12447 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
12448 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12449}
12450
12451
12452/** Opcode 0xdb 11/6. */
12453FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
12454{
12455 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
12456 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12457 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12458                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12459}
12460
12461
12462/**
12463 * @opcode 0xdb
12464 */
12465FNIEMOP_DEF(iemOp_EscF3)
12466{
12467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12468 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
12469 if (IEM_IS_MODRM_REG_MODE(bRm))
12470 {
12471 switch (IEM_GET_MODRM_REG_8(bRm))
12472 {
12473 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
12474 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
12475 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
12476 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
12477 case 4:
12478 switch (bRm)
12479 {
12480 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
12481 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
12482 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
12483 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
12484 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
12485 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
12486 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
12487 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
12488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12489 }
12490 break;
12491 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
12492 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
12493 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12495 }
12496 }
12497 else
12498 {
12499 switch (IEM_GET_MODRM_REG_8(bRm))
12500 {
12501 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
12502            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
12503 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
12504 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
12505 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
12506 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
12507 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12508 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
12509 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12510 }
12511 }
12512}
12513
12514
12515/**
12516 * Common worker for FPU instructions working on STn and ST0, and storing the
12517 * result in STn unless IE, DE or ZE was raised.
12518 *
12519 * @param bRm Mod R/M byte.
12520 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12521 */
12522FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12523{
12524 IEM_MC_BEGIN(0, 0);
12525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12526 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12527 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12528 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12529 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12530
12531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12532 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12533
12534 IEM_MC_PREPARE_FPU_USAGE();
12535 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
12536 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12537 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12538 } IEM_MC_ELSE() {
12539 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12540 } IEM_MC_ENDIF();
12541 IEM_MC_ADVANCE_RIP_AND_FINISH();
12542
12543 IEM_MC_END();
12544}
12545
12546
12547/** Opcode 0xdc 11/0. */
12548FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
12549{
12550 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
12551 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
12552}
12553
12554
12555/** Opcode 0xdc 11/1. */
12556FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
12557{
12558 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
12559 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
12560}
12561
12562
12563/** Opcode 0xdc 11/4. */
12564FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
12565{
12566 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
12567 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
12568}
12569
12570
12571/** Opcode 0xdc 11/5. */
12572FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
12573{
12574 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
12575 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
12576}
12577
12578
12579/** Opcode 0xdc 11/6. */
12580FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
12581{
12582 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
12583 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
12584}
12585
12586
12587/** Opcode 0xdc 11/7. */
12588FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
12589{
12590 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
12591 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
12592}
12593
12594
12595/**
12596 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
12597 * memory operand, and storing the result in ST0.
12598 *
12599 * @param bRm Mod R/M byte.
12600 * @param pfnImpl Pointer to the instruction implementation (assembly).
12601 */
12602FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
12603{
12604 IEM_MC_BEGIN(0, 0);
12605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12606 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12607 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
12608 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12609 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
12610 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
12611
12612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12614 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12615 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12616
12617 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12618 IEM_MC_PREPARE_FPU_USAGE();
12619 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
12620 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
12621 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12622 } IEM_MC_ELSE() {
12623 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12624 } IEM_MC_ENDIF();
12625 IEM_MC_ADVANCE_RIP_AND_FINISH();
12626
12627 IEM_MC_END();
12628}
12629
12630
12631/** Opcode 0xdc !11/0. */
12632FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
12633{
12634 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
12635 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
12636}
12637
12638
12639/** Opcode 0xdc !11/1. */
12640FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
12641{
12642 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
12643 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
12644}
12645
12646
12647/** Opcode 0xdc !11/2. */
12648FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
12649{
12650 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
12651
12652 IEM_MC_BEGIN(0, 0);
12653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12654 IEM_MC_LOCAL(uint16_t, u16Fsw);
12655 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12656 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12657 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12658 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12659
12660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12662
12663 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12664 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12665 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12666
12667 IEM_MC_PREPARE_FPU_USAGE();
12668 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12669 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12670 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12671 } IEM_MC_ELSE() {
12672 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12673 } IEM_MC_ENDIF();
12674 IEM_MC_ADVANCE_RIP_AND_FINISH();
12675
12676 IEM_MC_END();
12677}
12678
12679
12680/** Opcode 0xdc !11/3. */
12681FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
12682{
12683 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
12684
12685 IEM_MC_BEGIN(0, 0);
12686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12687 IEM_MC_LOCAL(uint16_t, u16Fsw);
12688 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12689 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12690 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12691 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12692
12693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12695
12696 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12697 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12698 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12699
12700 IEM_MC_PREPARE_FPU_USAGE();
12701 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12702 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12703 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12704 } IEM_MC_ELSE() {
12705 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12706 } IEM_MC_ENDIF();
12707 IEM_MC_ADVANCE_RIP_AND_FINISH();
12708
12709 IEM_MC_END();
12710}
12711
12712
12713/** Opcode 0xdc !11/4. */
12714FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
12715{
12716 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
12717 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
12718}
12719
12720
12721/** Opcode 0xdc !11/5. */
12722FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
12723{
12724 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
12725 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
12726}
12727
12728
12729/** Opcode 0xdc !11/6. */
12730FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
12731{
12732 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
12733 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
12734}
12735
12736
12737/** Opcode 0xdc !11/7. */
12738FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
12739{
12740 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
12741 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
12742}
12743
12744
12745/**
12746 * @opcode 0xdc
12747 */
12748FNIEMOP_DEF(iemOp_EscF4)
12749{
12750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12751 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12752 if (IEM_IS_MODRM_REG_MODE(bRm))
12753 {
12754 switch (IEM_GET_MODRM_REG_8(bRm))
12755 {
12756 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12757 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12758            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
12759            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
12760 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12761 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12762 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12763 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12765 }
12766 }
12767 else
12768 {
12769 switch (IEM_GET_MODRM_REG_8(bRm))
12770 {
12771 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12772 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12773 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12774 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12775 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12776 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12777 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12778 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12780 }
12781 }
12782}
12783
12784
12785/** Opcode 0xdd !11/0.
12786 * @sa iemOp_fld_m32r */
12787FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
12788{
12789 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
12790
12791 IEM_MC_BEGIN(0, 0);
12792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12793 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12794 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
12795 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12796 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
12797
12798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12800 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12801 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12802
12803 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12804 IEM_MC_PREPARE_FPU_USAGE();
12805 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12806 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
12807 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12808 } IEM_MC_ELSE() {
12809 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12810 } IEM_MC_ENDIF();
12811 IEM_MC_ADVANCE_RIP_AND_FINISH();
12812
12813 IEM_MC_END();
12814}
12815
12816
12817/** Opcode 0xdd !11/1. */
12818FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
12819{
12820 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
12821 IEM_MC_BEGIN(0, 0);
12822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12824
12825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12826 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12827 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12828 IEM_MC_PREPARE_FPU_USAGE();
12829
12830 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12831 IEM_MC_ARG(int64_t *, pi64Dst, 1);
12832 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12833
12834 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12835 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12836 IEM_MC_LOCAL(uint16_t, u16Fsw);
12837 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12838 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
12839 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12840 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12841 } IEM_MC_ELSE() {
12842 IEM_MC_IF_FCW_IM() {
12843 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
12844 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12845 } IEM_MC_ELSE() {
12846 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12847 } IEM_MC_ENDIF();
12848 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12849 } IEM_MC_ENDIF();
12850 IEM_MC_ADVANCE_RIP_AND_FINISH();
12851
12852 IEM_MC_END();
12853}
12854
12855
12856/** Opcode 0xdd !11/2. */
12857FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
12858{
12859 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
12860 IEM_MC_BEGIN(0, 0);
12861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12863
12864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12865 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12866 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12867 IEM_MC_PREPARE_FPU_USAGE();
12868
12869 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12870 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12871 IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12872
12873 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12874 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12875 IEM_MC_LOCAL(uint16_t, u16Fsw);
12876 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12877 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12878 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12879 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12880 } IEM_MC_ELSE() {
12881 IEM_MC_IF_FCW_IM() {
12882 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12883 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12884 } IEM_MC_ELSE() {
12885 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12886 } IEM_MC_ENDIF();
12887 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12888 } IEM_MC_ENDIF();
12889 IEM_MC_ADVANCE_RIP_AND_FINISH();
12890
12891 IEM_MC_END();
12892}
12893
12894
12895
12896
12897/** Opcode 0xdd !11/3. */
12898FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
12899{
12900 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
12901 IEM_MC_BEGIN(0, 0);
12902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12904
12905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12906 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12907 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12908 IEM_MC_PREPARE_FPU_USAGE();
12909
12910 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12911 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12912 IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12913
12914 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12915 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12916 IEM_MC_LOCAL(uint16_t, u16Fsw);
12917 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12918 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12919 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12920 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12921 } IEM_MC_ELSE() {
12922 IEM_MC_IF_FCW_IM() {
12923 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12924 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12925 } IEM_MC_ELSE() {
12926 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12927 } IEM_MC_ENDIF();
12928 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12929 } IEM_MC_ENDIF();
12930 IEM_MC_ADVANCE_RIP_AND_FINISH();
12931
12932 IEM_MC_END();
12933}
12934
12935
12936/** Opcode 0xdd !11/4. */
12937FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
12938{
12939 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
12940 IEM_MC_BEGIN(0, 0);
12941 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
12942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12943
12944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12946 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12947
12948 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12949 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12950 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12951 iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
12952 IEM_MC_END();
12953}
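
/* FRSTOR above and FNSAVE below touch the entire 94/108 byte FPU context, so
   rather than an inline MC block they defer to C fallbacks through
   IEM_MC_CALL_CIMPL_3, passing IEM_CIMPL_F_FPU plus the FCW/FSW shadow
   register mask so the native recompiler knows what may be dirtied. */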
12954
12955
12956/** Opcode 0xdd !11/6. */
12957FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
12958{
12959 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
12960 IEM_MC_BEGIN(0, 0);
12961 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
12962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12963
12964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12965 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12966 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
12967
12968 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12969 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12970 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12971 iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
12972 IEM_MC_END();
12973}
12974
12975/** Opcode 0xdd !11/7. */
12976FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
12977{
12978 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
12979
12980 IEM_MC_BEGIN(0, 0);
12981 IEM_MC_LOCAL(uint16_t, u16Tmp);
12982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12983
12984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12986 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12987
12988 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
12989 IEM_MC_FETCH_FSW(u16Tmp);
12990 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
12991 IEM_MC_ADVANCE_RIP_AND_FINISH();
12992
12993/** @todo Debug / drop a hint to the verifier that things may differ
12994 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
12995 * NT4SP1. (X86_FSW_PE) */
12996 IEM_MC_END();
12997}
12998
12999
13000/** Opcode 0xdd 11/0. */
13001FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
13002{
13003 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
13004    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
13005       unmodified. */
13006 IEM_MC_BEGIN(0, 0);
13007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13008
13009 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13010 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13011
13012 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13013 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
13014 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
13015
13016 IEM_MC_ADVANCE_RIP_AND_FINISH();
13017 IEM_MC_END();
13018}
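
/* FFREE only tags ST(N) as empty: TOP is left alone and nothing is popped,
   which is why no *_THEN_POP variant appears above.  The undocumented FFREEP
   encoding (0xdf /0 in register form) is the popping sibling. */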
13019
13020
13021/** Opcode 0xdd 11/2. */
13022FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
13023{
13024 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
13025 IEM_MC_BEGIN(0, 0);
13026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13027 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
13028 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13029 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13030 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13031
13032 IEM_MC_PREPARE_FPU_USAGE();
13033 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13034 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
13035 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
13036 } IEM_MC_ELSE() {
13037 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
13038 } IEM_MC_ENDIF();
13039
13040 IEM_MC_ADVANCE_RIP_AND_FINISH();
13041 IEM_MC_END();
13042}
13043
13044
13045/** Opcode 0xdd 11/4. */
13046FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13047{
13048 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
13049 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13050}
13051
13052
13053/** Opcode 0xdd 11/5. */
13054FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13055{
13056 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
13057 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13058}
13059
13060
13061/**
13062 * @opcode 0xdd
13063 */
13064FNIEMOP_DEF(iemOp_EscF5)
13065{
13066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13067 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
13068 if (IEM_IS_MODRM_REG_MODE(bRm))
13069 {
13070 switch (IEM_GET_MODRM_REG_8(bRm))
13071 {
13072 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
13073            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
13074 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
13075 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
13076            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
13077 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
13078 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
13079 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13080 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13081 }
13082 }
13083 else
13084 {
13085 switch (IEM_GET_MODRM_REG_8(bRm))
13086 {
13087 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
13088 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
13089 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
13090 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
13091 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
13092 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
13093 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
13094 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
13095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13096 }
13097 }
13098}
13099
13100
13101/** Opcode 0xde 11/0. */
13102FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
13103{
13104 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
13105 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
13106}
13107
13108
13109/** Opcode 0xde 11/1. */
13110FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
13111{
13112 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
13113 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
13114}
13115
13116
13117/** Opcode 0xde 0xd9. */
13118FNIEMOP_DEF(iemOp_fcompp)
13119{
13120 IEMOP_MNEMONIC(fcompp, "fcompp");
13121 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
13122}
13123
13124
13125/** Opcode 0xde 11/4. */
13126FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
13127{
13128 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
13129 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
13130}
13131
13132
13133/** Opcode 0xde 11/5. */
13134FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
13135{
13136 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
13137 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
13138}
13139
13140
13141/** Opcode 0xde 11/6. */
13142FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
13143{
13144 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
13145 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
13146}
13147
13148
13149/** Opcode 0xde 11/7. */
13150FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
13151{
13152 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
13153 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
13154}
13155
13156
13157/**
13158 * Common worker for FPU instructions working on ST0 and an m16i, and storing
13159 * the result in ST0.
13160 *
13161 * @param bRm Mod R/M byte.
13162 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13163 */
13164FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
13165{
13166 IEM_MC_BEGIN(0, 0);
13167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13168 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13169 IEM_MC_LOCAL(int16_t, i16Val2);
13170 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13171 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13172 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13173
13174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13176
13177 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13178 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13179 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13180
13181 IEM_MC_PREPARE_FPU_USAGE();
13182 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13183 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
13184 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
13185 } IEM_MC_ELSE() {
13186 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
13187 } IEM_MC_ENDIF();
13188 IEM_MC_ADVANCE_RIP_AND_FINISH();
13189
13190 IEM_MC_END();
13191}
13192
13193
13194/** Opcode 0xde !11/0. */
13195FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
13196{
13197 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
13198 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
13199}
13200
13201
13202/** Opcode 0xde !11/1. */
13203FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
13204{
13205 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
13206 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
13207}
13208
13209
13210/** Opcode 0xde !11/2. */
13211FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
13212{
13213 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
13214
13215 IEM_MC_BEGIN(0, 0);
13216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13217 IEM_MC_LOCAL(uint16_t, u16Fsw);
13218 IEM_MC_LOCAL(int16_t, i16Val2);
13219 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13220 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13221 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13222
13223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13225
13226 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13227 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13228 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13229
13230 IEM_MC_PREPARE_FPU_USAGE();
13231 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13232 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
13233 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13234 } IEM_MC_ELSE() {
13235 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13236 } IEM_MC_ENDIF();
13237 IEM_MC_ADVANCE_RIP_AND_FINISH();
13238
13239 IEM_MC_END();
13240}
13241
13242
13243/** Opcode 0xde !11/3. */
13244FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
13245{
13246 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
13247
13248 IEM_MC_BEGIN(0, 0);
13249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13250 IEM_MC_LOCAL(uint16_t, u16Fsw);
13251 IEM_MC_LOCAL(int16_t, i16Val2);
13252 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13253 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13254 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13255
13256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13258
13259 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13260 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13261 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13262
13263 IEM_MC_PREPARE_FPU_USAGE();
13264 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13265 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
13266 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13267 } IEM_MC_ELSE() {
13268 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13269 } IEM_MC_ENDIF();
13270 IEM_MC_ADVANCE_RIP_AND_FINISH();
13271
13272 IEM_MC_END();
13273}
13274
13275
13276/** Opcode 0xde !11/4. */
13277FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
13278{
13279 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
13280 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
13281}
13282
13283
13284/** Opcode 0xde !11/5. */
13285FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
13286{
13287 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
13288 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
13289}
13290
13291
13292/** Opcode 0xde !11/6. */
13293FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
13294{
13295 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
13296 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
13297}
13298
13299
13300/** Opcode 0xde !11/7. */
13301FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
13302{
13303 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
13304 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
13305}
13306
13307
13308/**
13309 * @opcode 0xde
13310 */
13311FNIEMOP_DEF(iemOp_EscF6)
13312{
13313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13314 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
13315 if (IEM_IS_MODRM_REG_MODE(bRm))
13316 {
13317 switch (IEM_GET_MODRM_REG_8(bRm))
13318 {
13319 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
13320 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
13321 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13322 case 3: if (bRm == 0xd9)
13323 return FNIEMOP_CALL(iemOp_fcompp);
13324 IEMOP_RAISE_INVALID_OPCODE_RET();
13325 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
13326 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
13327 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
13328 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
13329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13330 }
13331 }
13332 else
13333 {
13334 switch (IEM_GET_MODRM_REG_8(bRm))
13335 {
13336 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
13337 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
13338 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
13339 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
13340 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
13341 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
13342 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
13343 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
13344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13345 }
13346 }
13347}
13348
13349
13350/** Opcode 0xdf 11/0.
13351 * Undocumented instruction, assumed to work like ffree + fincstp. */
13352FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
13353{
13354 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
13355 IEM_MC_BEGIN(0, 0);
13356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13357
13358 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13359 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13360
13361 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13362 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
13363 IEM_MC_FPU_STACK_INC_TOP();
13364 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
13365
13366 IEM_MC_ADVANCE_RIP_AND_FINISH();
13367 IEM_MC_END();
13368}
13369
13370
13371/** Opcode 0xdf 0xe0. */
13372FNIEMOP_DEF(iemOp_fnstsw_ax)
13373{
13374 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
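    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT() here; the no-wait form must work
       with a pending #MF so the status word can actually be read out. */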
13375 IEM_MC_BEGIN(0, 0);
13376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13377 IEM_MC_LOCAL(uint16_t, u16Tmp);
13378 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13379 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13380 IEM_MC_FETCH_FSW(u16Tmp);
13381 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
13382 IEM_MC_ADVANCE_RIP_AND_FINISH();
13383 IEM_MC_END();
13384}
13385
13386
13387/** Opcode 0xdf 11/5. */
13388FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13389{
13390 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
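    /* fUCmp=true selects the unordered compare, which only raises #IA for
       signalling NaNs; bit 31 of the last parameter asks the worker to pop
       ST0 afterwards. */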
13391 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13392 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
13393 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13394}
13395
13396
13397/** Opcode 0xdf 11/6. */
13398FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
13399{
13400 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
13401 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13402 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13403 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13404}
13405
13406
13407/** Opcode 0xdf !11/0. */
13408FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
13409{
13410 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
13411
13412 IEM_MC_BEGIN(0, 0);
13413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13414 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13415 IEM_MC_LOCAL(int16_t, i16Val);
13416 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13417 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
13418
13419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13421
13422 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13423 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13424 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13425
13426 IEM_MC_PREPARE_FPU_USAGE();
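    /* The push target is TOP-1, i.e. register 7 relative to the current top. */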
13427 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13428 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
13429 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13430 } IEM_MC_ELSE() {
13431 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13432 } IEM_MC_ENDIF();
13433 IEM_MC_ADVANCE_RIP_AND_FINISH();
13434
13435 IEM_MC_END();
13436}
13437
13438
13439/** Opcode 0xdf !11/1. */
13440FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
13441{
13442 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
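    /* SSE3: converts to integer with truncation regardless of FCW.RC, then pops. */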
13443 IEM_MC_BEGIN(0, 0);
13444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13446
13447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13448 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13449 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13450 IEM_MC_PREPARE_FPU_USAGE();
13451
13452 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13453 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13454 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13455
13456 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13457 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13458 IEM_MC_LOCAL(uint16_t, u16Fsw);
13459 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13460 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13461 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13462 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13463 } IEM_MC_ELSE() {
13464 IEM_MC_IF_FCW_IM() {
13465 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13466 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13467 } IEM_MC_ELSE() {
13468 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13469 } IEM_MC_ENDIF();
13470 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13471 } IEM_MC_ENDIF();
13472 IEM_MC_ADVANCE_RIP_AND_FINISH();
13473
13474 IEM_MC_END();
13475}
13476
13477
13478/** Opcode 0xdf !11/2. */
13479FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
13480{
13481 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
13482 IEM_MC_BEGIN(0, 0);
13483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13485
13486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13487 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13488 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13489 IEM_MC_PREPARE_FPU_USAGE();
13490
13491 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13492 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13493 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13494
13495 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13496 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13497 IEM_MC_LOCAL(uint16_t, u16Fsw);
13498 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13499 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13500 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13501 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13502 } IEM_MC_ELSE() {
13503 IEM_MC_IF_FCW_IM() {
13504 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13505 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13506 } IEM_MC_ELSE() {
13507 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13508 } IEM_MC_ENDIF();
13509 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13510 } IEM_MC_ENDIF();
13511 IEM_MC_ADVANCE_RIP_AND_FINISH();
13512
13513 IEM_MC_END();
13514}
13515
13516
13517/** Opcode 0xdf !11/3. */
13518FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
13519{
13520 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
13521 IEM_MC_BEGIN(0, 0);
13522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13524
13525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13526 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13527 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13528 IEM_MC_PREPARE_FPU_USAGE();
13529
13530 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13531 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13532 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13533
13534 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13535 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13536 IEM_MC_LOCAL(uint16_t, u16Fsw);
13537 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13538 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13539 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13540 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13541 } IEM_MC_ELSE() {
13542 IEM_MC_IF_FCW_IM() {
13543 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13544 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13545 } IEM_MC_ELSE() {
13546 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13547 } IEM_MC_ENDIF();
13548 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13549 } IEM_MC_ENDIF();
13550 IEM_MC_ADVANCE_RIP_AND_FINISH();
13551
13552 IEM_MC_END();
13553}
13554
13555
13556/** Opcode 0xdf !11/4. */
13557FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
13558{
13559 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
13560
13561 IEM_MC_BEGIN(0, 0);
13562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13563 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13564 IEM_MC_LOCAL(RTPBCD80U, d80Val);
13565 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13566 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
13567
13568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13570
13571 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13572 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13573 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13574
13575 IEM_MC_PREPARE_FPU_USAGE();
13576 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13577 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
13578 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13579 } IEM_MC_ELSE() {
13580 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13581 } IEM_MC_ENDIF();
13582 IEM_MC_ADVANCE_RIP_AND_FINISH();
13583
13584 IEM_MC_END();
13585}
13586
13587
13588/** Opcode 0xdf !11/5. */
13589FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
13590{
13591 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
13592
13593 IEM_MC_BEGIN(0, 0);
13594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13595 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13596 IEM_MC_LOCAL(int64_t, i64Val);
13597 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13598 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
13599
13600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13602
13603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13605 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13606
13607 IEM_MC_PREPARE_FPU_USAGE();
13608 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13609 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
13610 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13611 } IEM_MC_ELSE() {
13612 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13613 } IEM_MC_ENDIF();
13614 IEM_MC_ADVANCE_RIP_AND_FINISH();
13615
13616 IEM_MC_END();
13617}
13618
13619
13620/** Opcode 0xdf !11/6. */
13621FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
13622{
13623 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
13624 IEM_MC_BEGIN(0, 0);
13625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13627
13628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13629 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13630 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13631 IEM_MC_PREPARE_FPU_USAGE();
13632
13633 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13634 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
13635 IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13636
13637 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13638 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13639 IEM_MC_LOCAL(uint16_t, u16Fsw);
13640 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13641 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
13642 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13643 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13644 } IEM_MC_ELSE() {
13645 IEM_MC_IF_FCW_IM() {
13646 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
13647 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13648 } IEM_MC_ELSE() {
13649 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13650 } IEM_MC_ENDIF();
13651 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13652 } IEM_MC_ENDIF();
13653 IEM_MC_ADVANCE_RIP_AND_FINISH();
13654
13655 IEM_MC_END();
13656}
13657
13658
13659/** Opcode 0xdf !11/7. */
13660FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
13661{
13662 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
13663 IEM_MC_BEGIN(0, 0);
13664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13666
13667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13668 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13669 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13670 IEM_MC_PREPARE_FPU_USAGE();
13671
13672 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13673 IEM_MC_ARG(int64_t *, pi64Dst, 1);
13674 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13675
13676 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13677 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13678 IEM_MC_LOCAL(uint16_t, u16Fsw);
13679 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13680 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
13681 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13682 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13683 } IEM_MC_ELSE() {
13684 IEM_MC_IF_FCW_IM() {
13685 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
13686 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13687 } IEM_MC_ELSE() {
13688 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13689 } IEM_MC_ENDIF();
13690 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13691 } IEM_MC_ENDIF();
13692 IEM_MC_ADVANCE_RIP_AND_FINISH();
13693
13694 IEM_MC_END();
13695}
13696
13697
13698/**
13699 * @opcode 0xdf
13700 */
13701FNIEMOP_DEF(iemOp_EscF7)
13702{
13703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13704 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13705 if (IEM_IS_MODRM_REG_MODE(bRm))
13706 {
13707 switch (IEM_GET_MODRM_REG_8(bRm))
13708 {
13709 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13710 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13711 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13712 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13713 case 4: if (bRm == 0xe0)
13714 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13715 IEMOP_RAISE_INVALID_OPCODE_RET();
13716 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13717 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13718 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13720 }
13721 }
13722 else
13723 {
13724 switch (IEM_GET_MODRM_REG_8(bRm))
13725 {
13726 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13727 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13728 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13729 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13730 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13731 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13732 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13733 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13735 }
13736 }
13737}
13738
13739
13740/**
13741 * @opcode 0xe0
13742 * @opfltest zf
13743 */
13744FNIEMOP_DEF(iemOp_loopne_Jb)
13745{
13746 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13747 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13748 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
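    /* Note: the IF macros below test the count register against 1 before the
       decrement that both branches perform; that is equivalent to testing the
       decremented value against zero as the architectural LOOPcc does. */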
13749
13750 switch (pVCpu->iem.s.enmEffAddrMode)
13751 {
13752 case IEMMODE_16BIT:
13753 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13755 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13756 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13757 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13758 } IEM_MC_ELSE() {
13759 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13760 IEM_MC_ADVANCE_RIP_AND_FINISH();
13761 } IEM_MC_ENDIF();
13762 IEM_MC_END();
13763 break;
13764
13765 case IEMMODE_32BIT:
13766 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13768 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13769 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13770 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13771 } IEM_MC_ELSE() {
13772 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13773 IEM_MC_ADVANCE_RIP_AND_FINISH();
13774 } IEM_MC_ENDIF();
13775 IEM_MC_END();
13776 break;
13777
13778 case IEMMODE_64BIT:
13779 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13781 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13782 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13783 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13784 } IEM_MC_ELSE() {
13785 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13786 IEM_MC_ADVANCE_RIP_AND_FINISH();
13787 } IEM_MC_ENDIF();
13788 IEM_MC_END();
13789 break;
13790
13791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13792 }
13793}
13794
13795
13796/**
13797 * @opcode 0xe1
13798 * @opfltest zf
13799 */
13800FNIEMOP_DEF(iemOp_loope_Jb)
13801{
13802 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13803 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13804 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13805
13806 switch (pVCpu->iem.s.enmEffAddrMode)
13807 {
13808 case IEMMODE_16BIT:
13809 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13811 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13812 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13813 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13814 } IEM_MC_ELSE() {
13815 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13816 IEM_MC_ADVANCE_RIP_AND_FINISH();
13817 } IEM_MC_ENDIF();
13818 IEM_MC_END();
13819 break;
13820
13821 case IEMMODE_32BIT:
13822 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13824 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13825 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13826 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13827 } IEM_MC_ELSE() {
13828 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13829 IEM_MC_ADVANCE_RIP_AND_FINISH();
13830 } IEM_MC_ENDIF();
13831 IEM_MC_END();
13832 break;
13833
13834 case IEMMODE_64BIT:
13835 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13837 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13838 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13839 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13840 } IEM_MC_ELSE() {
13841 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13842 IEM_MC_ADVANCE_RIP_AND_FINISH();
13843 } IEM_MC_ENDIF();
13844 IEM_MC_END();
13845 break;
13846
13847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13848 }
13849}
13850
13851
13852/**
13853 * @opcode 0xe2
13854 */
13855FNIEMOP_DEF(iemOp_loop_Jb)
13856{
13857 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13858 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13859 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13860
13861 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13862 * using the 32-bit operand size override. How can that be restarted? See
13863 * weird pseudo code in intel manual. */
13864
13865 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13866 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13867 * the loop causes guest crashes, but when logging it's nice to skip a few million
13868 * lines of useless output. */
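    /* A hypothetical guest stall loop of that shape (LOOP jumping to itself):
     *      mov     cx, 1000h
     *  spin:
     *      loop    spin        ; e2 fe = LOOP $-2, spins until CX reaches zero.
     * The LOG_ENABLED shortcut below just zeroes (e/r)CX in one go instead. */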
13869#if defined(LOG_ENABLED)
13870 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13871 switch (pVCpu->iem.s.enmEffAddrMode)
13872 {
13873 case IEMMODE_16BIT:
13874 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13876 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13877 IEM_MC_ADVANCE_RIP_AND_FINISH();
13878 IEM_MC_END();
13879 break;
13880
13881 case IEMMODE_32BIT:
13882 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13884 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13885 IEM_MC_ADVANCE_RIP_AND_FINISH();
13886 IEM_MC_END();
13887 break;
13888
13889 case IEMMODE_64BIT:
13890 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13892 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13893 IEM_MC_ADVANCE_RIP_AND_FINISH();
13894 IEM_MC_END();
13895 break;
13896
13897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13898 }
13899#endif
13900
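    /* In the else branches below the count register is known to be 1, so the
       decrement is done as a cheaper store of the constant 0. */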
13901 switch (pVCpu->iem.s.enmEffAddrMode)
13902 {
13903 case IEMMODE_16BIT:
13904 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13906 IEM_MC_IF_CX_IS_NOT_ONE() {
13907 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13908 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13909 } IEM_MC_ELSE() {
13910 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13911 IEM_MC_ADVANCE_RIP_AND_FINISH();
13912 } IEM_MC_ENDIF();
13913 IEM_MC_END();
13914 break;
13915
13916 case IEMMODE_32BIT:
13917 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13919 IEM_MC_IF_ECX_IS_NOT_ONE() {
13920 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13921 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13922 } IEM_MC_ELSE() {
13923 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13924 IEM_MC_ADVANCE_RIP_AND_FINISH();
13925 } IEM_MC_ENDIF();
13926 IEM_MC_END();
13927 break;
13928
13929 case IEMMODE_64BIT:
13930 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13932 IEM_MC_IF_RCX_IS_NOT_ONE() {
13933 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13934 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13935 } IEM_MC_ELSE() {
13936 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13937 IEM_MC_ADVANCE_RIP_AND_FINISH();
13938 } IEM_MC_ENDIF();
13939 IEM_MC_END();
13940 break;
13941
13942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13943 }
13944}
13945
13946
13947/**
13948 * @opcode 0xe3
13949 */
13950FNIEMOP_DEF(iemOp_jecxz_Jb)
13951{
13952 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13953 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13954 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
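    /* Note: unlike LOOPcc, JCXZ/JECXZ/JRCXZ only tests the count register and
       never decrements it, hence the plain IS_NZ checks below. */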
13955
13956 switch (pVCpu->iem.s.enmEffAddrMode)
13957 {
13958 case IEMMODE_16BIT:
13959 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13961 IEM_MC_IF_CX_IS_NZ() {
13962 IEM_MC_ADVANCE_RIP_AND_FINISH();
13963 } IEM_MC_ELSE() {
13964 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13965 } IEM_MC_ENDIF();
13966 IEM_MC_END();
13967 break;
13968
13969 case IEMMODE_32BIT:
13970 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13972 IEM_MC_IF_ECX_IS_NZ() {
13973 IEM_MC_ADVANCE_RIP_AND_FINISH();
13974 } IEM_MC_ELSE() {
13975 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13976 } IEM_MC_ENDIF();
13977 IEM_MC_END();
13978 break;
13979
13980 case IEMMODE_64BIT:
13981 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13983 IEM_MC_IF_RCX_IS_NZ() {
13984 IEM_MC_ADVANCE_RIP_AND_FINISH();
13985 } IEM_MC_ELSE() {
13986 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13987 } IEM_MC_ENDIF();
13988 IEM_MC_END();
13989 break;
13990
13991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13992 }
13993}
13994
13995
13996/**
13997 * @opcode 0xe4
13998 * @opfltest iopl
13999 */
14000FNIEMOP_DEF(iemOp_in_AL_Ib)
14001{
14002 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
14003 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14005 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14006 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14007}
14008
14009
14010/**
14011 * @opcode 0xe5
14012 * @opfltest iopl
14013 */
14014FNIEMOP_DEF(iemOp_in_eAX_Ib)
14015{
14016 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
14017 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14019 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14020 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14021 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14022}
14023
14024
14025/**
14026 * @opcode 0xe6
14027 * @opfltest iopl
14028 */
14029FNIEMOP_DEF(iemOp_out_Ib_AL)
14030{
14031 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
14032 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14034 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14035 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14036}
14037
14038
14039/**
14040 * @opcode 0xe7
14041 * @opfltest iopl
14042 */
14043FNIEMOP_DEF(iemOp_out_Ib_eAX)
14044{
14045 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
14046 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14048 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14049 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14050 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14051}
14052
14053
14054/**
14055 * @opcode 0xe8
14056 */
14057FNIEMOP_DEF(iemOp_call_Jv)
14058{
14059 IEMOP_MNEMONIC(call_Jv, "call Jv");
14060 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14061 switch (pVCpu->iem.s.enmEffOpSize)
14062 {
14063 case IEMMODE_16BIT:
14064 {
14065 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14066 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
14067 iemCImpl_call_rel_16, (int16_t)u16Imm);
14068 }
14069
14070 case IEMMODE_32BIT:
14071 {
14072 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14073 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
14074 iemCImpl_call_rel_32, (int32_t)u32Imm);
14075 }
14076
14077 case IEMMODE_64BIT:
14078 {
14079 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14080 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
14081 iemCImpl_call_rel_64, u64Imm);
14082 }
14083
14084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14085 }
14086}
14087
14088
14089/**
14090 * @opcode 0xe9
14091 */
14092FNIEMOP_DEF(iemOp_jmp_Jv)
14093{
14094 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
14095 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14096 switch (pVCpu->iem.s.enmEffOpSize)
14097 {
14098 case IEMMODE_16BIT:
14099 IEM_MC_BEGIN(0, 0);
14100 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
14101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14102 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
14103 IEM_MC_END();
14104 break;
14105
14106 case IEMMODE_64BIT:
14107 case IEMMODE_32BIT:
14108 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14109 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
14110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14111 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
14112 IEM_MC_END();
14113 break;
14114
14115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14116 }
14117}
14118
14119
14120/**
14121 * @opcode 0xea
14122 */
14123FNIEMOP_DEF(iemOp_jmp_Ap)
14124{
14125 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
14126 IEMOP_HLP_NO_64BIT();
14127
14128 /* Decode the far pointer address and pass it on to the far jump C implementation. */
14129 uint32_t off32Seg;
14130 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
14131 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
14132 else
14133 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
14134 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
14135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14136 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
14137 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
14138 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
14139 /** @todo make task-switches, ring-switches, ++ return non-zero status */
14140}
14141
14142
14143/**
14144 * @opcode 0xeb
14145 */
14146FNIEMOP_DEF(iemOp_jmp_Jb)
14147{
14148 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
14149 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
14150 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14151
14152 IEM_MC_BEGIN(0, 0);
14153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14154 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
14155 IEM_MC_END();
14156}
14157
14158
14159/**
14160 * @opcode 0xec
14161 * @opfltest iopl
14162 */
14163FNIEMOP_DEF(iemOp_in_AL_DX)
14164{
14165 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
14166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14167 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
14168 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14169 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
14170}
14171
14172
14173/**
14174 * @opcode 0xed
14175 * @opfltest iopl
14176 */
14177FNIEMOP_DEF(iemOp_in_eAX_DX)
14178{
14179 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
14180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14181 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
14182 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14183 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14184 pVCpu->iem.s.enmEffAddrMode);
14185}
14186
14187
14188/**
14189 * @opcode 0xee
14190 * @opfltest iopl
14191 */
14192FNIEMOP_DEF(iemOp_out_DX_AL)
14193{
14194 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
14195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14196 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14197 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
14198}
14199
14200
14201/**
14202 * @opcode 0xef
14203 * @opfltest iopl
14204 */
14205FNIEMOP_DEF(iemOp_out_DX_eAX)
14206{
14207 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
14208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14209 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14210 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14211 pVCpu->iem.s.enmEffAddrMode);
14212}
14213
14214
14215/**
14216 * @opcode 0xf0
14217 */
14218FNIEMOP_DEF(iemOp_lock)
14219{
14220 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
14221 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
14222
14223 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14224 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14225}
14226
14227
14228/**
14229 * @opcode 0xf1
14230 */
14231FNIEMOP_DEF(iemOp_int1)
14232{
14233 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
14234 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
14235 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
14236 * LOADALL memo. Needs some testing. */
14237 IEMOP_HLP_MIN_386();
14238 /** @todo testcase! */
14239 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
14240 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
14241 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
14242}
14243
14244
14245/**
14246 * @opcode 0xf2
14247 */
14248FNIEMOP_DEF(iemOp_repne)
14249{
14250 /* This overrides any previous REPE prefix. */
14251 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
14252 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
14253 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
14254
14255 /* For the 4 entry opcode tables, REPNZ overrides any previous
14256 REPZ and operand size prefixes. */
14257 pVCpu->iem.s.idxPrefix = 3;
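    /* (Assumed index layout of those tables: 0 = no prefix, 1 = 0x66,
       2 = 0xf3, 3 = 0xf2 - iemOp_repe below sets 2 accordingly.) */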
14258
14259 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14260 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14261}
14262
14263
14264/**
14265 * @opcode 0xf3
14266 */
14267FNIEMOP_DEF(iemOp_repe)
14268{
14269 /* This overrides any previous REPNE prefix. */
14270 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
14271 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
14272 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
14273
14274 /* For the 4 entry opcode tables, REPZ overrides any previous
14275 REPNZ and operand size prefixes. */
14276 pVCpu->iem.s.idxPrefix = 2;
14277
14278 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14279 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14280}
14281
14282
14283/**
14284 * @opcode 0xf4
14285 */
14286FNIEMOP_DEF(iemOp_hlt)
14287{
14288 IEMOP_MNEMONIC(hlt, "hlt");
14289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14290 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
14291}
14292
14293
14294/**
14295 * @opcode 0xf5
14296 * @opflmodify cf
14297 */
14298FNIEMOP_DEF(iemOp_cmc)
14299{
14300 IEMOP_MNEMONIC(cmc, "cmc");
14301 IEM_MC_BEGIN(0, 0);
14302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14303 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
14304 IEM_MC_ADVANCE_RIP_AND_FINISH();
14305 IEM_MC_END();
14306}
14307
14308
14309/**
14310 * Body for 'inc/dec/not/neg Eb'.
14311 */
14312#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
14313 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
14314 { \
14315 /* register access */ \
14316 IEM_MC_BEGIN(0, 0); \
14317 IEMOP_HLP_DONE_DECODING(); \
14318 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14319 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14320 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
14321 IEM_MC_REF_EFLAGS(pEFlags); \
14322 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14323 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14324 IEM_MC_END(); \
14325 } \
14326 else \
14327 { \
14328 /* memory access. */ \
14329 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14330 { \
14331 IEM_MC_BEGIN(0, 0); \
14332 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14334 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14335 \
14336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14337 IEMOP_HLP_DONE_DECODING(); \
14338 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14339 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14340 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14341 \
14342 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14343 IEM_MC_COMMIT_EFLAGS(EFlags); \
14344 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14345 IEM_MC_END(); \
14346 } \
14347 else \
14348 { \
14349 IEM_MC_BEGIN(0, 0); \
14350 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14352 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14353 \
14354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14355 IEMOP_HLP_DONE_DECODING(); \
14356 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14358 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
14359 \
14360 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14361 IEM_MC_COMMIT_EFLAGS(EFlags); \
14362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14363 IEM_MC_END(); \
14364 } \
14365 } \
14366 (void)0
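/* The dangling (void)0 above lets the expansion site supply the final semicolon. */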
14367
14368
14369/**
14370 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
14371 */
14372#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
14373 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14374 { \
14375 /* \
14376 * Register target \
14377 */ \
14378 switch (pVCpu->iem.s.enmEffOpSize) \
14379 { \
14380 case IEMMODE_16BIT: \
14381 IEM_MC_BEGIN(0, 0); \
14382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14383 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14384 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14385 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14386 IEM_MC_REF_EFLAGS(pEFlags); \
14387 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14388 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14389 IEM_MC_END(); \
14390 break; \
14391 \
14392 case IEMMODE_32BIT: \
14393 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14395 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14396 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14397 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14398 IEM_MC_REF_EFLAGS(pEFlags); \
14399 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14400 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
14401 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14402 IEM_MC_END(); \
14403 break; \
14404 \
14405 case IEMMODE_64BIT: \
14406 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14408 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14409 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14410 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14411 IEM_MC_REF_EFLAGS(pEFlags); \
14412 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14413 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14414 IEM_MC_END(); \
14415 break; \
14416 \
14417 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14418 } \
14419 } \
14420 else \
14421 { \
14422 /* \
14423 * Memory target. \
14424 */ \
14425 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14426 { \
14427 switch (pVCpu->iem.s.enmEffOpSize) \
14428 { \
14429 case IEMMODE_16BIT: \
14430 IEM_MC_BEGIN(0, 0); \
14431 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14433 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14434 \
14435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14436 IEMOP_HLP_DONE_DECODING(); \
14437 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14438 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14439 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14440 \
14441 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14442 IEM_MC_COMMIT_EFLAGS(EFlags); \
14443 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14444 IEM_MC_END(); \
14445 break; \
14446 \
14447 case IEMMODE_32BIT: \
14448 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14449 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14451 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14452 \
14453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14454 IEMOP_HLP_DONE_DECODING(); \
14455 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14456 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14457 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14458 \
14459 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14460 IEM_MC_COMMIT_EFLAGS(EFlags); \
14461 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14462 IEM_MC_END(); \
14463 break; \
14464 \
14465 case IEMMODE_64BIT: \
14466 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14467 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14469 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14470 \
14471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14472 IEMOP_HLP_DONE_DECODING(); \
14473 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14474 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14475 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14476 \
14477 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14478 IEM_MC_COMMIT_EFLAGS(EFlags); \
14479 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14480 IEM_MC_END(); \
14481 break; \
14482 \
14483 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14484 } \
14485 } \
14486 else \
14487 { \
14488 (void)0
14489
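/**
 * Locked-memory tail of IEMOP_BODY_UNARY_Ev.
 *
 * Note: the two macros must be instantiated back to back, as the body macro
 * above deliberately ends inside the still-open 'else' of the LOCK-prefixed
 * memory path and this one supplies the atomic variants and closes it (see
 * iemOp_grp3_not_Ev for a usage example).
 */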
14490#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
14491 switch (pVCpu->iem.s.enmEffOpSize) \
14492 { \
14493 case IEMMODE_16BIT: \
14494 IEM_MC_BEGIN(0, 0); \
14495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14497 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14498 \
14499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14500 IEMOP_HLP_DONE_DECODING(); \
14501 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14502 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14503 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
14504 \
14505 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14506 IEM_MC_COMMIT_EFLAGS(EFlags); \
14507 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14508 IEM_MC_END(); \
14509 break; \
14510 \
14511 case IEMMODE_32BIT: \
14512 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14513 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14515 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14516 \
14517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14518 IEMOP_HLP_DONE_DECODING(); \
14519 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14520 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14521 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
14522 \
14523 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14524 IEM_MC_COMMIT_EFLAGS(EFlags); \
14525 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14526 IEM_MC_END(); \
14527 break; \
14528 \
14529 case IEMMODE_64BIT: \
14530 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14531 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14533 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14534 \
14535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14536 IEMOP_HLP_DONE_DECODING(); \
14537 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14538 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14539 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
14540 \
14541 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14542 IEM_MC_COMMIT_EFLAGS(EFlags); \
14543 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14544 IEM_MC_END(); \
14545 break; \
14546 \
14547 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14548 } \
14549 } \
14550 } \
14551 (void)0
14552
14553
14554/**
14555 * @opmaps grp3_f6
14556 * @opcode /0
14557 * @opflclass logical
14558 * @todo also /1
14559 */
14560FNIEMOP_DEF_1(iemOp_grp3_test_Eb_Ib, uint8_t, bRm)
14561{
14562 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14563 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14564 IEMOP_BODY_BINARY_Eb_Ib_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14565}
14566
14567
14568/* Body for opcode 0xf6 variations /4, /5, /6 and /7. */
14569#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
14570 PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
14571 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14572 { \
14573 /* register access */ \
14574 IEM_MC_BEGIN(0, 0); \
14575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14576 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14577 IEM_MC_ARG(uint8_t, u8Value, 1); \
14578 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
14579 \
14580 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14581 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14582 IEM_MC_REF_EFLAGS(pEFlags); \
14583 IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
14584 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14585 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14586 } IEM_MC_ELSE() { \
14587 IEM_MC_RAISE_DIVIDE_ERROR(); \
14588 } IEM_MC_ENDIF(); \
14589 \
14590 IEM_MC_END(); \
14591 } \
14592 else \
14593 { \
14594 /* memory access. */ \
14595 IEM_MC_BEGIN(0, 0); \
14596 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14597 IEM_MC_ARG(uint8_t, u8Value, 1); \
14598 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
14599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14600 \
14601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14603 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14604 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14605 IEM_MC_REF_EFLAGS(pEFlags); \
14606 IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
14607 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14608 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14609 } IEM_MC_ELSE() { \
14610 IEM_MC_RAISE_DIVIDE_ERROR(); \
14611 } IEM_MC_ENDIF(); \
14612 \
14613 IEM_MC_END(); \
14614 } (void)0
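/* Note: the mul/div assembly helpers return zero on success and non-zero when
   the operation must raise #DE, hence the IEM_MC_IF_LOCAL_IS_Z(rc) checks in
   this and the following macro. */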
14615
14616
14617/* Body for opcode 0xf7 variant /4, /5, /6 and /7. */
14618#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
14619 PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
14620 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14621 { \
14622 /* register access */ \
14623 switch (pVCpu->iem.s.enmEffOpSize) \
14624 { \
14625 case IEMMODE_16BIT: \
14626 IEM_MC_BEGIN(0, 0); \
14627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14628 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14629 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14630 IEM_MC_ARG(uint16_t, u16Value, 2); \
14631 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14632 \
14633 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14634 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14635 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14636 IEM_MC_REF_EFLAGS(pEFlags); \
14637 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14638 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14639 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14640 } IEM_MC_ELSE() { \
14641 IEM_MC_RAISE_DIVIDE_ERROR(); \
14642 } IEM_MC_ENDIF(); \
14643 \
14644 IEM_MC_END(); \
14645 break; \
14646 \
14647 case IEMMODE_32BIT: \
14648 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14650 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14651 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14652 IEM_MC_ARG(uint32_t, u32Value, 2); \
14653 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14654 \
14655 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14656 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14657 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14658 IEM_MC_REF_EFLAGS(pEFlags); \
14659 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14660 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14661 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14662 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14663 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14664 } IEM_MC_ELSE() { \
14665 IEM_MC_RAISE_DIVIDE_ERROR(); \
14666 } IEM_MC_ENDIF(); \
14667 \
14668 IEM_MC_END(); \
14669 break; \
14670 \
14671 case IEMMODE_64BIT: \
14672 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14674 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14675 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14676 IEM_MC_ARG(uint64_t, u64Value, 2); \
14677 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14678 \
14679 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14680 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14681 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14682 IEM_MC_REF_EFLAGS(pEFlags); \
14683 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14684 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14685 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14686 } IEM_MC_ELSE() { \
14687 IEM_MC_RAISE_DIVIDE_ERROR(); \
14688 } IEM_MC_ENDIF(); \
14689 \
14690 IEM_MC_END(); \
14691 break; \
14692 \
14693 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14694 } \
14695 } \
14696 else \
14697 { \
14698 /* memory access. */ \
14699 switch (pVCpu->iem.s.enmEffOpSize) \
14700 { \
14701 case IEMMODE_16BIT: \
14702 IEM_MC_BEGIN(0, 0); \
14703 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14704 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14705 IEM_MC_ARG(uint16_t, u16Value, 2); \
14706 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14708 \
14709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14711 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14712 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14713 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14714 IEM_MC_REF_EFLAGS(pEFlags); \
14715 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14716 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14717 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14718 } IEM_MC_ELSE() { \
14719 IEM_MC_RAISE_DIVIDE_ERROR(); \
14720 } IEM_MC_ENDIF(); \
14721 \
14722 IEM_MC_END(); \
14723 break; \
14724 \
14725 case IEMMODE_32BIT: \
14726 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14727 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14728 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14729 IEM_MC_ARG(uint32_t, u32Value, 2); \
14730 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14732 \
14733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14735 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14736 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14737 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14738 IEM_MC_REF_EFLAGS(pEFlags); \
14739 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14740 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14741 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14742 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14743 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14744 } IEM_MC_ELSE() { \
14745 IEM_MC_RAISE_DIVIDE_ERROR(); \
14746 } IEM_MC_ENDIF(); \
14747 \
14748 IEM_MC_END(); \
14749 break; \
14750 \
14751 case IEMMODE_64BIT: \
14752 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14753 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14754 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14755 IEM_MC_ARG(uint64_t, u64Value, 2); \
14756 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14758 \
14759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14761 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14762 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14763 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14764 IEM_MC_REF_EFLAGS(pEFlags); \
14765 IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14766 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14767 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14768 } IEM_MC_ELSE() { \
14769 IEM_MC_RAISE_DIVIDE_ERROR(); \
14770 } IEM_MC_ENDIF(); \
14771 \
14772 IEM_MC_END(); \
14773 break; \
14774 \
14775 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14776 } \
14777 } (void)0
14778
14779
14780/**
14781 * @opmaps grp3_f6
14782 * @opcode /2
14783 * @opflclass unchanged
14784 */
14785FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14786{
14787/** @todo does not modify EFLAGS. */
14788 IEMOP_MNEMONIC(not_Eb, "not Eb");
14789 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14790}
14791
14792
14793/**
14794 * @opmaps grp3_f6
14795 * @opcode /3
14796 * @opflclass arithmetic
14797 */
14798FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14799{
14800 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
14801 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14802}
14803
14804
14805/**
14806 * @opcode 0xf6
14807 */
14808FNIEMOP_DEF(iemOp_Grp3_Eb)
14809{
14810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14811 switch (IEM_GET_MODRM_REG_8(bRm))
14812 {
14813 case 0:
14814 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14815 case 1:
14816 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14817 case 2:
14818 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14819 case 3:
14820 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14821 case 4:
14822 {
14823 /**
14824 * @opdone
14825 * @opmaps grp3_f6
14826 * @opcode /4
14827 * @opflclass multiply
14828 */
14829 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14830 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14831 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14832 break;
14833 }
14834 case 5:
14835 {
14836 /**
14837 * @opdone
14838 * @opmaps grp3_f6
14839 * @opcode /5
14840 * @opflclass multiply
14841 */
14842 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14844 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14845 break;
14846 }
14847 case 6:
14848 {
14849 /**
14850 * @opdone
14851 * @opmaps grp3_f6
14852 * @opcode /6
14853 * @opflclass division
14854 */
14855 IEMOP_MNEMONIC(div_Eb, "div Eb");
14856 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14857 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14858 break;
14859 }
14860 case 7:
14861 {
14862 /**
14863 * @opdone
14864 * @opmaps grp3_f6
14865 * @opcode /7
14866 * @opflclass division
14867 */
14868 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14869 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14870 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14871 break;
14872 }
14873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14874 }
14875}
14876
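/*
 * Decode illustration for the dispatcher above (example bytes are ours,
 * not from this file): "div byte [bx]" encodes as F6 37; ModRM 0x37 gives
 * reg = (0x37 >> 3) & 7 = 6, so the switch lands in the DIV case.  Note
 * that /0 and /1 both go to TEST: F6 /1 is the undocumented TEST alias
 * that real CPUs implement.
 */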
14877
14878/**
14879 * @opmaps grp3_f7
14880 * @opcode /0
14881 * @opflclass logical
14882 */
14883FNIEMOP_DEF_1(iemOp_grp3_test_Ev_Iz, uint8_t, bRm)
14884{
14885    IEMOP_MNEMONIC(test_Ev_Iz, "test Ev,Iz");
14886 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14887 IEMOP_BODY_BINARY_Ev_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14888}
14889
14890
14891/**
14892 * @opmaps grp3_f7
14893 * @opcode /2
14894 * @opflclass unchanged
14895 */
14896FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14897{
14898/** @todo does not modify EFLAGS */
14899 IEMOP_MNEMONIC(not_Ev, "not Ev");
14900 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14901 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14902}
14903
14904
14905/**
14906 * @opmaps grp3_f7
14907 * @opcode /3
14908 * @opflclass arithmetic
14909 */
14910FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14911{
14912 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14913 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14914 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14915}
14916
14917
14918/**
14919 * @opmaps grp3_f7
14920 * @opcode /4
14921 * @opflclass multiply
14922 */
14923FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14924{
14925 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14927 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14928}
14929
14930
14931/**
14932 * @opmaps grp3_f7
14933 * @opcode /5
14934 * @opflclass multiply
14935 */
14936FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14937{
14938 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14940 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14941}
14942
14943
14944/**
14945 * @opmaps grp3_f7
14946 * @opcode /6
14947 * @opflclass division
14948 */
14949FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14950{
14951 IEMOP_MNEMONIC(div_Ev, "div Ev");
14952 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14953 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14954}
14955
14956
14957/**
14958 * @opmaps grp3_f7
14959 * @opcode /7
14960 * @opflclass division
14961 */
14962FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14963{
14964 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14966 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14967}
14968
14969
14970/**
14971 * @opcode 0xf7
14972 */
14973FNIEMOP_DEF(iemOp_Grp3_Ev)
14974{
14975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14976 switch (IEM_GET_MODRM_REG_8(bRm))
14977 {
14978 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14979 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14980 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14981 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14982 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14983 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14984 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14985 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14986 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14987 }
14988}
14989
14990
14991/**
14992 * @opcode 0xf8
14993 * @opflmodify cf
14994 * @opflclear cf
14995 */
14996FNIEMOP_DEF(iemOp_clc)
14997{
14998 IEMOP_MNEMONIC(clc, "clc");
14999 IEM_MC_BEGIN(0, 0);
15000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15001 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
15002 IEM_MC_ADVANCE_RIP_AND_FINISH();
15003 IEM_MC_END();
15004}
15005
15006
15007/**
15008 * @opcode 0xf9
15009 * @opflmodify cf
15010 * @opflset cf
15011 */
15012FNIEMOP_DEF(iemOp_stc)
15013{
15014 IEMOP_MNEMONIC(stc, "stc");
15015 IEM_MC_BEGIN(0, 0);
15016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15017 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
15018 IEM_MC_ADVANCE_RIP_AND_FINISH();
15019 IEM_MC_END();
15020}
15021
15022
15023/**
15024 * @opcode 0xfa
15025 * @opfltest iopl,vm
15026 * @opflmodify if,vif
15027 */
15028FNIEMOP_DEF(iemOp_cli)
15029{
15030 IEMOP_MNEMONIC(cli, "cli");
15031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15032 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
15033}
15034
15035
15036/**
15037 * @opcode 0xfb
15038 * @opfltest iopl,vm
15039 * @opflmodify if,vif
15040 */
15041FNIEMOP_DEF(iemOp_sti)
15042{
15043 IEMOP_MNEMONIC(sti, "sti");
15044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15045 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
15046 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
15047}
15048
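/*
 * Note on the flag choice above: STI keeps interrupts inhibited until the
 * boundary of the following instruction (the "interrupt shadow"), which is
 * what IEM_CIMPL_F_INHIBIT_SHADOW requests, while IEM_CIMPL_F_CHECK_IRQ_AFTER
 * asks for a pending-interrupt check once the instruction has completed.
 */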
15049
15050/**
15051 * @opcode 0xfc
15052 * @opflmodify df
15053 * @opflclear df
15054 */
15055FNIEMOP_DEF(iemOp_cld)
15056{
15057 IEMOP_MNEMONIC(cld, "cld");
15058 IEM_MC_BEGIN(0, 0);
15059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15060 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
15061 IEM_MC_ADVANCE_RIP_AND_FINISH();
15062 IEM_MC_END();
15063}
15064
15065
15066/**
15067 * @opcode 0xfd
15068 * @opflmodify df
15069 * @opflset df
15070 */
15071FNIEMOP_DEF(iemOp_std)
15072{
15073 IEMOP_MNEMONIC(std, "std");
15074 IEM_MC_BEGIN(0, 0);
15075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15076 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
15077 IEM_MC_ADVANCE_RIP_AND_FINISH();
15078 IEM_MC_END();
15079}
15080
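/*
 * Effect of the DF bit toggled by CLD/STD above, as consumed by the string
 * instructions (illustrative pseudo-C, not code from this file):
 *
 *     rSI += (fEFlags & X86_EFL_DF) ? -(int)cbOperand : (int)cbOperand;
 *     rDI += (fEFlags & X86_EFL_DF) ? -(int)cbOperand : (int)cbOperand;
 */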
15081
15082/**
15083 * @opmaps grp4
15084 * @opcode /0
15085 * @opflclass incdec
15086 */
15087FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
15088{
15089 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
15090 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
15091}
15092
15093
15094/**
15095 * @opmaps grp4
15096 * @opcode /1
15097 * @opflclass incdec
15098 */
15099FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
15100{
15101 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
15102 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
15103}
15104
15105
15106/**
15107 * @opcode 0xfe
15108 */
15109FNIEMOP_DEF(iemOp_Grp4)
15110{
15111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15112 switch (IEM_GET_MODRM_REG_8(bRm))
15113 {
15114 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
15115 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
15116 default:
15117 /** @todo is the eff-addr decoded? */
15118 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
15119 IEMOP_RAISE_INVALID_OPCODE_RET();
15120 }
15121}
15122
15123/**
15124 * @opmaps grp5
15125 * @opcode /0
15126 * @opflclass incdec
15127 */
15128FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
15129{
15130 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
15131 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
15132 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
15133}
15134
15135
15136/**
15137 * @opmaps grp5
15138 * @opcode /1
15139 * @opflclass incdec
15140 */
15141FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
15142{
15143 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
15144 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
15145 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
15146}
15147
15148
15149/**
15150 * Opcode 0xff /2.
15151 * @param bRm The RM byte.
15152 */
15153FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
15154{
15155 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
15156 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15157
15158 if (IEM_IS_MODRM_REG_MODE(bRm))
15159 {
15160 /* The new RIP is taken from a register. */
15161 switch (pVCpu->iem.s.enmEffOpSize)
15162 {
15163 case IEMMODE_16BIT:
15164 IEM_MC_BEGIN(0, 0);
15165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15166 IEM_MC_ARG(uint16_t, u16Target, 0);
15167 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15168 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15169 IEM_MC_END();
15170 break;
15171
15172 case IEMMODE_32BIT:
15173 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15175 IEM_MC_ARG(uint32_t, u32Target, 0);
15176 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15177 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15178 IEM_MC_END();
15179 break;
15180
15181 case IEMMODE_64BIT:
15182 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15184 IEM_MC_ARG(uint64_t, u64Target, 0);
15185 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15186 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15187 IEM_MC_END();
15188 break;
15189
15190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15191 }
15192 }
15193 else
15194 {
15195        /* The new RIP is taken from a memory location. */
15196 switch (pVCpu->iem.s.enmEffOpSize)
15197 {
15198 case IEMMODE_16BIT:
15199 IEM_MC_BEGIN(0, 0);
15200 IEM_MC_ARG(uint16_t, u16Target, 0);
15201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15204 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15205 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15206 IEM_MC_END();
15207 break;
15208
15209 case IEMMODE_32BIT:
15210 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15211 IEM_MC_ARG(uint32_t, u32Target, 0);
15212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15215 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15216 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15217 IEM_MC_END();
15218 break;
15219
15220 case IEMMODE_64BIT:
15221 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15222 IEM_MC_ARG(uint64_t, u64Target, 0);
15223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15226 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15227 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15228 IEM_MC_END();
15229 break;
15230
15231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15232 }
15233 }
15234}
15235
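/*
 * Semantics sketch for the near indirect CALL above (pseudo-C, not the
 * actual iemCImpl_call_16/32/64 code): push the address of the instruction
 * following FF /2, then branch to the fetched target:
 *
 *     push(uRipOfNextInstruction);   // return address
 *     rip = uTarget;                 // from register or memory, see above
 */
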
15236#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
15237 /* Registers? How?? */ \
15238 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
15239 { /* likely */ } \
15240 else \
15241 IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
15242 \
15243    /* 64-bit mode: Default is 32-bit, but only Intel respects a REX.W prefix. */ \
15244 /** @todo what does VIA do? */ \
15245 if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
15246 { /* likely */ } \
15247 else \
15248 pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
15249 \
15250 /* Far pointer loaded from memory. */ \
15251 switch (pVCpu->iem.s.enmEffOpSize) \
15252 { \
15253 case IEMMODE_16BIT: \
15254 IEM_MC_BEGIN(0, 0); \
15255 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15256 IEM_MC_ARG(uint16_t, offSeg, 1); \
15257 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
15258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15261 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15262 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
15263 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15264 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
15265 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15266 IEM_MC_END(); \
15267 break; \
15268 \
15269 case IEMMODE_32BIT: \
15270 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
15271 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15272 IEM_MC_ARG(uint32_t, offSeg, 1); \
15273 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
15274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15277 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15278 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
15279 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15280 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
15281 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15282 IEM_MC_END(); \
15283 break; \
15284 \
15285 case IEMMODE_64BIT: \
15286 Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
15287 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
15288 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15289 IEM_MC_ARG(uint64_t, offSeg, 1); \
15290 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
15291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15294 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15295 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
15296 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15297 | IEM_CIMPL_F_MODE /* no gates */, 0, \
15298 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15299 IEM_MC_END(); \
15300 break; \
15301 \
15302 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
15303 } do {} while (0)
15304
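/*
 * Operand layout assumed by IEMOP_BODY_GRP5_FAR_EP above: the m16:16,
 * m16:32 and m16:64 far pointers store the offset first with the 16-bit
 * selector right after it, which is why the selector fetch uses a
 * displacement equal to the offset width (2, 4 or 8).  E.g. with a 16-bit
 * operand size:
 *
 *     callf far [bx]
 *     ; [bx+0]: uint16_t offSeg
 *     ; [bx+2]: uint16_t u16Sel
 */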
15305
15306/**
15307 * Opcode 0xff /3.
15308 * @param bRm The RM byte.
15309 */
15310FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
15311{
15312 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
15313 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
15314}
15315
15316
15317/**
15318 * Opcode 0xff /4.
15319 * @param bRm The RM byte.
15320 */
15321FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15322{
15323 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
15324 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15325
15326 if (IEM_IS_MODRM_REG_MODE(bRm))
15327 {
15328 /* The new RIP is taken from a register. */
15329 switch (pVCpu->iem.s.enmEffOpSize)
15330 {
15331 case IEMMODE_16BIT:
15332 IEM_MC_BEGIN(0, 0);
15333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15334 IEM_MC_LOCAL(uint16_t, u16Target);
15335 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15336 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15337 IEM_MC_END();
15338 break;
15339
15340 case IEMMODE_32BIT:
15341 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15343 IEM_MC_LOCAL(uint32_t, u32Target);
15344 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15345 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15346 IEM_MC_END();
15347 break;
15348
15349 case IEMMODE_64BIT:
15350 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15352 IEM_MC_LOCAL(uint64_t, u64Target);
15353 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15354 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15355 IEM_MC_END();
15356 break;
15357
15358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15359 }
15360 }
15361 else
15362 {
15363 /* The new RIP is taken from a memory location. */
15364 switch (pVCpu->iem.s.enmEffOpSize)
15365 {
15366 case IEMMODE_16BIT:
15367 IEM_MC_BEGIN(0, 0);
15368 IEM_MC_LOCAL(uint16_t, u16Target);
15369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15372 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15373 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15374 IEM_MC_END();
15375 break;
15376
15377 case IEMMODE_32BIT:
15378 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
15379 IEM_MC_LOCAL(uint32_t, u32Target);
15380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15383 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15384 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15385 IEM_MC_END();
15386 break;
15387
15388 case IEMMODE_64BIT:
15389 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15390 IEM_MC_LOCAL(uint64_t, u64Target);
15391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15394 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15395 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15396 IEM_MC_END();
15397 break;
15398
15399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15400 }
15401 }
15402}
15403
15404
15405/**
15406 * Opcode 0xff /5.
15407 * @param bRm The RM byte.
15408 */
15409FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15410{
15411 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
15412 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
15413}
15414
15415
15416/**
15417 * Opcode 0xff /6.
15418 * @param bRm The RM byte.
15419 */
15420FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15421{
15422 IEMOP_MNEMONIC(push_Ev, "push Ev");
15423
15424 /* Registers are handled by a common worker. */
15425 if (IEM_IS_MODRM_REG_MODE(bRm))
15426 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
15427
15428 /* Memory we do here. */
15429 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15430 switch (pVCpu->iem.s.enmEffOpSize)
15431 {
15432 case IEMMODE_16BIT:
15433 IEM_MC_BEGIN(0, 0);
15434 IEM_MC_LOCAL(uint16_t, u16Src);
15435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15438 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15439 IEM_MC_PUSH_U16(u16Src);
15440 IEM_MC_ADVANCE_RIP_AND_FINISH();
15441 IEM_MC_END();
15442 break;
15443
15444 case IEMMODE_32BIT:
15445 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
15446 IEM_MC_LOCAL(uint32_t, u32Src);
15447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15450 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15451 IEM_MC_PUSH_U32(u32Src);
15452 IEM_MC_ADVANCE_RIP_AND_FINISH();
15453 IEM_MC_END();
15454 break;
15455
15456 case IEMMODE_64BIT:
15457 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
15458 IEM_MC_LOCAL(uint64_t, u64Src);
15459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15462 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15463 IEM_MC_PUSH_U64(u64Src);
15464 IEM_MC_ADVANCE_RIP_AND_FINISH();
15465 IEM_MC_END();
15466 break;
15467
15468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15469 }
15470}
15471
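/*
 * Ordering note for the memory forms above: the source operand is fetched
 * before IEM_MC_PUSH_Uxx touches the stack, so a fault on the source
 * access is delivered with RSP still unmodified.
 */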
15472
15473/**
15474 * @opcode 0xff
15475 */
15476FNIEMOP_DEF(iemOp_Grp5)
15477{
15478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15479 switch (IEM_GET_MODRM_REG_8(bRm))
15480 {
15481 case 0:
15482 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15483 case 1:
15484 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15485 case 2:
15486 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15487 case 3:
15488 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15489 case 4:
15490 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15491 case 5:
15492 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15493 case 6:
15494 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15495 case 7:
15496 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15497 IEMOP_RAISE_INVALID_OPCODE_RET();
15498 }
15499 AssertFailedReturn(VERR_IEM_IPE_3);
15500}
15501
15502
15503
15504const PFNIEMOP g_apfnOneByteMap[256] =
15505{
15506 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
15507 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
15508 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
15509 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
15510 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
15511 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
15512 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
15513 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
15514 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
15515 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
15516 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
15517 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
15518 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
15519 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
15520 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
15521 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
15522 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
15523 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
15524 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
15525 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
15526 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
15527 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
15528 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
15529 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
15530 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
15531 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
15532 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
15533 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
15534 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
15535 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
15536 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
15537 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
15538 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
15539 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
15540 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
15541 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
15542 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
15543 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
15544 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
15545 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
15546 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
15547 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
15548 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
15549 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
15550 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
15551 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
15552 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
15553 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
15554 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
15555 /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
15556 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
15557 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
15558 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
15559 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
15560 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
15561 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
15562 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
15563 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
15564 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
15565 /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
15566 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
15567 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
15568 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
15569 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
15570};
15571
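/*
 * Usage sketch for the table above (the decoder proper lives elsewhere in
 * IEM; this is illustrative, not a quote of it): fetch the first opcode
 * byte and index the map:
 *
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *     return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */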
15572
15573/** @} */
15574