VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h

Last change on this file was 104419, checked in by vboxsync, 4 weeks ago

VMM/IEM: Convert near return (retn) and relative/indirect call instructions to special IEM MC statements in order to be able to recompile them, bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 593.6 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 104419 2024-04-24 14:32:29Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
/** One-byte opcode decoder function table, indexed by opcode byte (0x00..0xff).
 * Not static since we need to forward declare it. */
extern const PFNIEMOP g_apfnOneByteMap[256];
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register (reg,reg form with identical source and destination).
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on the
 * same register.
 *
 * The test below checks mod == 3 and reg == rm in one compare (bRm >> 3 yields
 * mod:reg, and rm | (3 << 3) is what that equals when mod is 3 and reg == rm),
 * plus identical REX extension bits, i.e. same register both ways.
 *
 * Fix: use the macro parameter a_bRm consistently instead of implicitly
 * capturing a caller-scope 'bRm' variable (macro hygiene).
 *
 * @param   a_bRm   The ModR/M byte.
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        /* Result is always zero; only the status flags need computing. */ \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        IEM_MC_LOCAL_EFLAGS(fEFlags); \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
80
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (destination is accessed read-write).
 *
 * Register destinations may use a native recompiler emitter; memory
 * destinations always go through the assembly worker, with the atomic
 * (_locked) worker selected when a honoured LOCK prefix is present.
 *
 * @param   a_bRm                   The ModR/M byte.
 * @param   a_InsNm                 Instruction name fragment used to form the
 *                                  iemAImpl_xxx_u8 / iemNativeEmit_xxx names.
 * @param   a_fRegRegNativeArchs    Host archs with a native reg,reg emitter.
 * @param   a_fMemRegNativeArchs    Host archs with a native mem,reg emitter.
 *                                  NOTE(review): not referenced by this body -
 *                                  confirm whether a native memory path is
 *                                  still planned.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS_OPT(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlags, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlags, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix: atomic mapping and the _locked assembly worker. */ \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 2); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
159
/**
 * Body for instructions like TEST & CMP with a byte memory/registers as
 * operands (destination is only read, never written back).
 *
 * A LOCK prefix is invalid for these read-only forms and raises \#UD.
 *
 * @param   a_bRm               The ModR/M byte.
 * @param   a_fnNormalU8        The u8 assembly worker function.
 * @param   a_EmitterBasename   Basename for the iemNativeEmit_xxx_r_r_efl
 *                              native emitter.
 * @param   a_fNativeArchs      Host archs with a native reg,reg emitter.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { /* NOTE(review): native mem,reg path deliberately disabled (always-false condition) - confirm intent. */ \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 1); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 2); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU8, fEFlagsIn, pu8Dst, u8Src); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
231
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (reg,rm operand order; rm may be a register or memory).
 *
 * Since the destination is always a register, a LOCK prefix is invalid and
 * rejected in both paths.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         Instruction name fragment used to form the
 *                          iemAImpl_xxx_u8 / iemNativeEmit_xxx names.
 * @param   a_fNativeArchs  Host archs with a native reg,reg emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
292
/**
 * Body for byte instruction CMP with a register as the destination
 * (reg,rm operand order; destination register is read but never written).
 *
 * Same shape as IEMOP_BODY_BINARY_r8_rm, except no result store in the
 * native paths since CMP only updates EFLAGS.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         Instruction name fragment used to form the
 *                          iemAImpl_xxx_u8 / iemNativeEmit_xxx names.
 * @param   a_fNativeArchs  Host archs with a native reg,reg emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
350
351
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (read-write destination access).
 *
 * @param   a_bRm                   The ModR/M byte.
 * @param   a_InsNm                 Instruction name fragment used to form the
 *                                  iemAImpl_xxx_u16/u32/u64 and
 *                                  iemNativeEmit_xxx worker names.
 * @param   a_fRegRegNativeArchs    Host archs with a native reg,reg emitter.
 * @param   a_fMemRegNativeArchs    Host archs with a native mem,reg emitter.
 *                                  NOTE(review): not referenced by this body -
 *                                  confirm whether this is intentional.
 *
 * @note    This macro deliberately ends inside the lock-prefixed 'else'
 *          branch with two scopes still open; it MUST be followed by
 *          IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the locked body
 *          and closes those scopes (split to work around an
 *          IEMAllInstPython.py parsing issue).
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    /* 32-bit ops clear bits 63:32 of the destination GPR. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint16_t, u16Src, 2); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint32_t, u32Src, 2); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG(uint64_t, u64Src, 2); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Locked (LOCK prefixed) continuation of IEMOP_BODY_BINARY_rm_rv_RW: supplies
 * the atomic memory bodies and closes the two scopes the RW macro left open.
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_InsNm     Instruction name fragment used to form the
 *                      iemAImpl_xxx_u16/u32/u64_locked worker names.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
            IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint16_t, u16Src, 2); \
            IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
            IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint32_t, u32Src, 2); \
            IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
            IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint64_t, u64Src, 2); \
            IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    /* Close the locked-else and memory-else scopes opened by _rm_rv_RW. */ \
    } \
    } \
    (void)0
581
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (destination only read, never written).
 *
 * A LOCK prefix is invalid for these read-only forms and raises \#UD.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_InsNm         Instruction name fragment used to form the
 *                          iemAImpl_xxx_u16/u32/u64 and iemNativeEmit_xxx
 *                          worker names.
 * @param   a_fNativeArchs  Host archs with native emitters (used for both
 *                          reg,reg and mem,reg paths here).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 2); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 2); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 2); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
766
767
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_InsNm         Instruction name fragment used to form the
 *                          iemAImpl_xxx_u8 / iemNativeEmit_xxx_r_i_efl names.
 * @param   a_fNativeArchs  Host archs with a native reg,imm emitter.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
    IEM_MC_BEGIN(0, 0); \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
        IEM_MC_LOCAL(uint8_t, u8Dst); \
        IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
        IEM_MC_LOCAL(uint32_t, uEFlags); \
        IEM_MC_FETCH_EFLAGS(uEFlags); \
        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
    } IEM_MC_NATIVE_ELSE() { \
        IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 2); \
        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
    } IEM_MC_NATIVE_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
794
795/**
796 * Body for instructions like ADD, AND, OR, ++ with working on
797 * AX/EAX/RAX with a word/dword immediate.
798 */
799#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
800 switch (pVCpu->iem.s.enmEffOpSize) \
801 { \
802 case IEMMODE_16BIT: \
803 { \
804 IEM_MC_BEGIN(0, 0); \
805 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
807 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
808 IEM_MC_LOCAL(uint16_t, u16Dst); \
809 IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
810 IEM_MC_LOCAL(uint32_t, uEFlags); \
811 IEM_MC_FETCH_EFLAGS(uEFlags); \
812 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
813 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
814 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
815 } IEM_MC_NATIVE_ELSE() { \
816 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
817 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
818 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
819 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
820 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
821 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
822 } IEM_MC_NATIVE_ENDIF(); \
823 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
824 IEM_MC_END(); \
825 } \
826 \
827 case IEMMODE_32BIT: \
828 { \
829 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
830 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
832 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
833 IEM_MC_LOCAL(uint32_t, u32Dst); \
834 IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
835 IEM_MC_LOCAL(uint32_t, uEFlags); \
836 IEM_MC_FETCH_EFLAGS(uEFlags); \
837 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
838 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
839 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
840 } IEM_MC_NATIVE_ELSE() { \
841 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
842 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
843 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
844 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
845 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
846 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
847 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
848 } IEM_MC_NATIVE_ENDIF(); \
849 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
850 IEM_MC_END(); \
851 } \
852 \
853 case IEMMODE_64BIT: \
854 { \
855 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
856 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
858 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
859 IEM_MC_LOCAL(uint64_t, u64Dst); \
860 IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
861 IEM_MC_LOCAL(uint32_t, uEFlags); \
862 IEM_MC_FETCH_EFLAGS(uEFlags); \
863 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
864 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
865 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
866 } IEM_MC_NATIVE_ELSE() { \
867 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
868 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
869 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
870 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
871 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
872 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
873 } IEM_MC_NATIVE_ENDIF(); \
874 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
875 IEM_MC_END(); \
876 } \
877 \
878 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
879 } \
880 (void)0
881
882/**
883 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
884 * word/dword immediate.
885 */
886#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
887 switch (pVCpu->iem.s.enmEffOpSize) \
888 { \
889 case IEMMODE_16BIT: \
890 { \
891 IEM_MC_BEGIN(0, 0); \
892 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
894 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
895 IEM_MC_LOCAL(uint16_t, u16Dst); \
896 IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
897 IEM_MC_LOCAL(uint32_t, uEFlags); \
898 IEM_MC_FETCH_EFLAGS(uEFlags); \
899 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
900 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
901 } IEM_MC_NATIVE_ELSE() { \
902 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2); \
903 IEM_MC_ARG(uint16_t const *,pu16Dst, 1); \
904 IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
905 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
906 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
907 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
908 } IEM_MC_NATIVE_ENDIF(); \
909 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
910 IEM_MC_END(); \
911 } \
912 \
913 case IEMMODE_32BIT: \
914 { \
915 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
916 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
918 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
919 IEM_MC_LOCAL(uint32_t, u32Dst); \
920 IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
921 IEM_MC_LOCAL(uint32_t, uEFlags); \
922 IEM_MC_FETCH_EFLAGS(uEFlags); \
923 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
924 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
925 } IEM_MC_NATIVE_ELSE() { \
926 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2); \
927 IEM_MC_ARG(uint32_t const *,pu32Dst, 1); \
928 IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
929 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
930 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
931 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
932 } IEM_MC_NATIVE_ENDIF(); \
933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
934 IEM_MC_END(); \
935 } \
936 \
937 case IEMMODE_64BIT: \
938 { \
939 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
940 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
942 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
943 IEM_MC_LOCAL(uint64_t, u64Dst); \
944 IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
945 IEM_MC_LOCAL(uint32_t, uEFlags); \
946 IEM_MC_FETCH_EFLAGS(uEFlags); \
947 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
948 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
949 } IEM_MC_NATIVE_ELSE() { \
950 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2); \
951 IEM_MC_ARG(uint64_t const *,pu64Dst, 1); \
952 IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
953 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
954 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
955 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
956 } IEM_MC_NATIVE_ENDIF(); \
957 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
958 IEM_MC_END(); \
959 } \
960 \
961 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
962 } \
963 (void)0
964
965
966
967/* Instruction specification format - work in progress: */
968
969/**
970 * @opcode 0x00
971 * @opmnemonic add
972 * @op1 rm:Eb
973 * @op2 reg:Gb
974 * @opmaps one
975 * @openc ModR/M
976 * @opflclass arithmetic
977 * @ophints harmless ignores_op_sizes
978 * @opstats add_Eb_Gb
979 * @opgroup og_gen_arith_bin
980 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
981 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
982 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
983 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
984 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: byte add, memory/register destination; LOCK allowed per the hint. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
991
992
993/**
994 * @opcode 0x01
995 * @opgroup og_gen_arith_bin
996 * @opflclass arithmetic
997 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
998 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
999 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1000 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1001 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: word/dword/qword add; unlocked body first, then the LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
}
1009
1010
1011/**
1012 * @opcode 0x02
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic
1015 * @opcopytests iemOp_add_Eb_Gb
1016 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: byte add with register destination (no LOCK hint for this encoding). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1023
1024
1025/**
1026 * @opcode 0x03
1027 * @opgroup og_gen_arith_bin
1028 * @opflclass arithmetic
1029 * @opcopytests iemOp_add_Ev_Gv
1030 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register destination; the rv_rm body also takes the assembly helpers explicitly. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1037
1038
1039/**
1040 * @opcode 0x04
1041 * @opgroup og_gen_arith_bin
1042 * @opflclass arithmetic
1043 * @opcopytests iemOp_add_Eb_Gb
1044 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: the shared AL,Ib body fetches the immediate itself. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1050
1051
1052/**
1053 * @opcode 0x05
1054 * @opgroup og_gen_arith_bin
1055 * @opflclass arithmetic
1056 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
1057 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1058 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1059 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1060 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: read-modify-write body, one case per effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1066
1067
1068/**
1069 * @opcode 0x06
1070 * @opgroup og_stack_sreg
1071 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; shares the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
1078
1079
1080/**
1081 * @opcode 0x07
1082 * @opgroup og_stack_sreg
1083 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode.  Defers to the C implementation; the
       register mask tells the recompiler which guest state is dirtied
       (RSP plus the ES selector/base/limit/attributes). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
1097
1098
1099/**
1100 * @opcode 0x08
1101 * @opgroup og_gen_arith_bin
1102 * @opflclass logical
1103 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1104 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1105 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1106 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1107 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: AF is architecturally undefined for OR, hence the verification exemption. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1115
1116
1117/*
1118 * @opcode 0x09
1119 * @opgroup og_gen_arith_bin
1120 * @opflclass logical
1121 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1122 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1123 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1124 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1125 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1126 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1127 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
1128 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: unlocked body first, then the LOCK-prefixed memory form; AF undefined. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
}
1137
1138
1139/**
1140 * @opcode 0x0a
1141 * @opgroup og_gen_arith_bin
1142 * @opflclass logical
1143 * @opcopytests iemOp_or_Eb_Gb
1144 */
1145FNIEMOP_DEF(iemOp_or_Gb_Eb)
1146{
1147 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1148 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1150 IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1151}
1152
1153
1154/**
1155 * @opcode 0x0b
1156 * @opgroup og_gen_arith_bin
1157 * @opflclass logical
1158 * @opcopytests iemOp_or_Ev_Gv
1159 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register destination; AF undefined for OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1167
1168
1169/**
1170 * @opcode 0x0c
1171 * @opgroup og_gen_arith_bin
1172 * @opflclass logical
1173 * @opcopytests iemOp_or_Eb_Gb
1174 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: shared AL,Ib body; AF undefined for OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1181
1182
1183/**
1184 * @opcode 0x0d
1185 * @opgroup og_gen_arith_bin
1186 * @opflclass logical
1187 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1188 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1189 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1190 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1191 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1192 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1193 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1194 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: read-modify-write body; AF undefined for OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1201
1202
1203/**
1204 * @opcode 0x0e
1205 * @opgroup og_stack_sreg
1206 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; uses the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1213
1214
1215/**
1216 * @opcode 0x0f
1217 * @opmnemonic EscTwo0f
1218 * @openc two0f
1219 * @opdisenum OP_2B_ESC
1220 * @ophints harmless
1221 * @opgroup og_escapes
1222 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */  }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 286 and later: 0x0f escapes into the two-byte opcode map; the map is
       indexed by opcode byte times four plus the mandatory-prefix index. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst   + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1260
1261/**
1262 * @opcode 0x10
1263 * @opgroup og_gen_arith_bin
1264 * @opflclass arithmetic_carry
1265 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1266 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1267 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1268 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1269 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1270 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: byte add-with-carry, memory/register destination; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1277
1278
1279/**
1280 * @opcode 0x11
1281 * @opgroup og_gen_arith_bin
1282 * @opflclass arithmetic_carry
1283 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1284 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1285 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1286 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1287 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1288 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: unlocked body first, then the LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
}
1296
1297
1298/**
1299 * @opcode 0x12
1300 * @opgroup og_gen_arith_bin
1301 * @opflclass arithmetic_carry
1302 * @opcopytests iemOp_adc_Eb_Gb
1303 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination (no LOCK hint for this encoding). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1310
1311
1312/**
1313 * @opcode 0x13
1314 * @opgroup og_gen_arith_bin
1315 * @opflclass arithmetic_carry
1316 * @opcopytests iemOp_adc_Ev_Gv
1317 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination with explicit assembly helpers. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1324
1325
1326/**
1327 * @opcode 0x14
1328 * @opgroup og_gen_arith_bin
1329 * @opflclass arithmetic_carry
1330 * @opcopytests iemOp_adc_Eb_Gb
1331 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: shared AL,Ib body fetches the immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1337
1338
1339/**
1340 * @opcode 0x15
1341 * @opgroup og_gen_arith_bin
1342 * @opflclass arithmetic_carry
1343 * @opcopytests iemOp_adc_Ev_Gv
1344 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: read-modify-write body, one case per effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1350
1351
1352/**
1353 * @opcode 0x16
1354 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1361
1362
1363/**
1364 * @opcode 0x17
1365 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode.  Defers to the C implementation with
       IEM_CIMPL_F_INHIBIT_SHADOW (POP SS blocks interrupts for one
       instruction); the mask flags RSP and the SS register state as dirtied. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1379
1380
1381/**
1382 * @opcode 0x18
1383 * @opgroup og_gen_arith_bin
1384 * @opflclass arithmetic_carry
1385 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: byte subtract-with-borrow, memory/register destination; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1392
1393
1394/**
1395 * @opcode 0x19
1396 * @opgroup og_gen_arith_bin
1397 * @opflclass arithmetic_carry
1398 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: unlocked body first, then the LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
}
1406
1407
1408/**
1409 * @opcode 0x1a
1410 * @opgroup og_gen_arith_bin
1411 * @opflclass arithmetic_carry
1412 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination (no LOCK hint for this encoding). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1419
1420
1421/**
1422 * @opcode 0x1b
1423 * @opgroup og_gen_arith_bin
1424 * @opflclass arithmetic_carry
1425 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination with explicit assembly helpers. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1432
1433
1434/**
1435 * @opcode 0x1c
1436 * @opgroup og_gen_arith_bin
1437 * @opflclass arithmetic_carry
1438 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: shared AL,Ib body fetches the immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1444
1445
1446/**
1447 * @opcode 0x1d
1448 * @opgroup og_gen_arith_bin
1449 * @opflclass arithmetic_carry
1450 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: read-modify-write body, one case per effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1456
1457
1458/**
1459 * @opcode 0x1e
1460 * @opgroup og_stack_sreg
1461 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1468
1469
1470/**
1471 * @opcode 0x1f
1472 * @opgroup og_stack_sreg
1473 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode.  Defers to the C implementation; the
       mask flags RSP and the DS selector/base/limit/attributes as dirtied. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1487
1488
1489/**
1490 * @opcode 0x20
1491 * @opgroup og_gen_arith_bin
1492 * @opflclass logical
1493 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: memory/register destination, LOCK allowed; AF undefined for AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1501
1502
1503/**
1504 * @opcode 0x21
1505 * @opgroup og_gen_arith_bin
1506 * @opflclass logical
1507 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: unlocked body first, then the LOCK-prefixed memory form; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
}
1516
1517
1518/**
1519 * @opcode 0x22
1520 * @opgroup og_gen_arith_bin
1521 * @opflclass logical
1522 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination; AF undefined for AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1530
1531
1532/**
1533 * @opcode 0x23
1534 * @opgroup og_gen_arith_bin
1535 * @opflclass logical
1536 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination with explicit assembly helpers; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1544
1545
1546/**
1547 * @opcode 0x24
1548 * @opgroup og_gen_arith_bin
1549 * @opflclass logical
1550 */
1551FNIEMOP_DEF(iemOp_and_Al_Ib)
1552{
1553 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1555 IEMOP_BODY_BINARY_AL_Ib(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1556}
1557
1558
1559/**
1560 * @opcode 0x25
1561 * @opgroup og_gen_arith_bin
1562 * @opflclass logical
1563 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: read-modify-write body; AF undefined for AND. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1570
1571
1572/**
1573 * @opcode 0x26
1574 * @opmnemonic SEG
1575 * @op1 ES
1576 * @opgroup og_prefix
1577 * @openc prefix
1578 * @opdisenum OP_SEG
1579 * @ophints harmless
1580 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the override and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1590
1591
1592/**
1593 * @opcode 0x27
1594 * @opfltest af,cf
1595 * @opflmodify cf,pf,af,zf,sf,of
1596 * @opflundef of
1597 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode, OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1606
1607
1608/**
1609 * Special case body for word/dword/qword instruction like SUB and XOR that can
1610 * be used to zero a register.
1611 *
1612 * This can be used both for the rv_rm and rm_rv forms since it's working on the
1613 * same register.
1614 */
1615#define IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(a_bRm) \
1616 if ( (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
1617 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
1618 { \
1619 switch (pVCpu->iem.s.enmEffOpSize) \
1620 { \
1621 case IEMMODE_16BIT: \
1622 IEM_MC_BEGIN(0, 0); \
1623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1624 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1625 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1626 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1627 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1628 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1629 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1630 IEM_MC_END(); \
1631 break; \
1632 \
1633 case IEMMODE_32BIT: \
1634 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
1635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1636 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1637 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1638 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1639 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1640 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1641 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1642 IEM_MC_END(); \
1643 break; \
1644 \
1645 case IEMMODE_64BIT: \
1646 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1648 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
1649 IEM_MC_LOCAL_EFLAGS(fEFlags); \
1650 IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
1651 IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
1652 IEM_MC_COMMIT_EFLAGS(fEFlags); \
1653 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1654 IEM_MC_END(); \
1655 break; \
1656 \
1657 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1658 } \
1659 } ((void)0)
1660
1661
1662/**
1663 * @opcode 0x28
1664 * @opgroup og_gen_arith_bin
1665 * @opflclass arithmetic
1666 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: byte subtract, memory/register destination; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1674
1675
1676/**
1677 * @opcode 0x29
1678 * @opgroup og_gen_arith_bin
1679 * @opflclass arithmetic
1680 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: zeroing fast path first, then unlocked body, then the LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
}
1689
1690
1691/**
1692 * @opcode 0x2a
1693 * @opgroup og_gen_arith_bin
1694 * @opflclass arithmetic
1695 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination; zeroing fast path for sub reg,reg first. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1703
1704
1705/**
1706 * @opcode 0x2b
1707 * @opgroup og_gen_arith_bin
1708 * @opflclass arithmetic
1709 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination; zeroing fast path for sub reg,reg first. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1717
1718
1719/**
1720 * @opcode 0x2c
1721 * @opgroup og_gen_arith_bin
1722 * @opflclass arithmetic
1723 */
1724FNIEMOP_DEF(iemOp_sub_Al_Ib)
1725{
1726 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1727 IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1728}
1729
1730
1731/**
1732 * @opcode 0x2d
1733 * @opgroup og_gen_arith_bin
1734 * @opflclass arithmetic
1735 */
1736FNIEMOP_DEF(iemOp_sub_eAX_Iz)
1737{
1738 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1739 IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1740}
1741
1742
1743/**
1744 * @opcode 0x2e
1745 * @opmnemonic SEG
1746 * @op1 CS
1747 * @opgroup og_prefix
1748 * @openc prefix
1749 * @opdisenum OP_SEG
1750 * @ophints harmless
1751 */
1752FNIEMOP_DEF(iemOp_seg_CS)
1753{
1754 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1755 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1756 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1757
1758 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1759 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1760}
1761
1762
1763/**
1764 * @opcode 0x2f
1765 * @opfltest af,cf
1766 * @opflmodify cf,pf,af,zf,sf,of
1767 * @opflundef of
1768 */
1769FNIEMOP_DEF(iemOp_das)
1770{
1771 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1772 IEMOP_HLP_NO_64BIT();
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1775 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
1776}
1777
1778
1779/**
1780 * @opcode 0x30
1781 * @opgroup og_gen_arith_bin
1782 * @opflclass logical
1783 */
1784FNIEMOP_DEF(iemOp_xor_Eb_Gb)
1785{
1786 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1790 IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1791}
1792
1793
1794/**
1795 * @opcode 0x31
1796 * @opgroup og_gen_arith_bin
1797 * @opflclass logical
1798 */
1799FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1800{
1801 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1802 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1804 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1805 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1806 IEMOP_BODY_BINARY_rm_rv_LOCKED( bRm, xor);
1807}
1808
1809
1810/**
1811 * @opcode 0x32
1812 * @opgroup og_gen_arith_bin
1813 * @opflclass logical
1814 */
1815FNIEMOP_DEF(iemOp_xor_Gb_Eb)
1816{
1817 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1818 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1821 IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1822}
1823
1824
1825/**
1826 * @opcode 0x33
1827 * @opgroup og_gen_arith_bin
1828 * @opflclass logical
1829 */
1830FNIEMOP_DEF(iemOp_xor_Gv_Ev)
1831{
1832 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1833 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1835 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1836 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1837}
1838
1839
1840/**
1841 * @opcode 0x34
1842 * @opgroup og_gen_arith_bin
1843 * @opflclass logical
1844 */
1845FNIEMOP_DEF(iemOp_xor_Al_Ib)
1846{
1847 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1849 IEMOP_BODY_BINARY_AL_Ib(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1850}
1851
1852
1853/**
1854 * @opcode 0x35
1855 * @opgroup og_gen_arith_bin
1856 * @opflclass logical
1857 */
1858FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1859{
1860 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1862 IEMOP_BODY_BINARY_rAX_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1863}
1864
1865
1866/**
1867 * @opcode 0x36
1868 * @opmnemonic SEG
1869 * @op1 SS
1870 * @opgroup og_prefix
1871 * @openc prefix
1872 * @opdisenum OP_SEG
1873 * @ophints harmless
1874 */
1875FNIEMOP_DEF(iemOp_seg_SS)
1876{
1877 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1878 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1879 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1880
1881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1883}
1884
1885
1886/**
1887 * @opcode 0x37
1888 * @opfltest af
1889 * @opflmodify cf,pf,af,zf,sf,of
1890 * @opflundef pf,zf,sf,of
1891 * @opgroup og_gen_arith_dec
1892 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1893 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1894 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1895 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1896 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1897 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1898 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1899 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1900 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1901 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1902 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1903 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1904 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1905 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1906 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1907 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1908 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1909 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1910 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1911 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1912 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1913 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1914 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1915 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1916 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1917 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1918 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1919 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1920 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1921 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1922 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1923 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation (iemCImpl_aaa), which reads/writes
       AL/AH - hence the xAX register mask. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1933
1934
1935/**
1936 * Body for word/dword/qword the instruction CMP, ++ with a register as the
1937 * destination.
1938 *
1939 * @note Used both in OneByte and TwoByte0f.
1940 */
1941#define IEMOP_BODY_BINARY_rv_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
1942 /* \
1943 * If rm is denoting a register, no more instruction bytes. \
1944 */ \
1945 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
1946 { \
1947 switch (pVCpu->iem.s.enmEffOpSize) \
1948 { \
1949 case IEMMODE_16BIT: \
1950 IEM_MC_BEGIN(0, 0); \
1951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1952 IEM_MC_ARG(uint16_t, u16Src, 2); \
1953 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1954 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1955 IEM_MC_LOCAL(uint16_t, u16Dst); \
1956 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1957 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1958 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
1959 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1960 } IEM_MC_NATIVE_ELSE() { \
1961 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
1962 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1963 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1964 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
1965 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1966 } IEM_MC_NATIVE_ENDIF(); \
1967 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1968 IEM_MC_END(); \
1969 break; \
1970 \
1971 case IEMMODE_32BIT: \
1972 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1974 IEM_MC_ARG(uint32_t, u32Src, 2); \
1975 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1976 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1977 IEM_MC_LOCAL(uint32_t, u32Dst); \
1978 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1979 IEM_MC_LOCAL_EFLAGS(uEFlags); \
1980 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
1981 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
1982 } IEM_MC_NATIVE_ELSE() { \
1983 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
1984 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1985 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1986 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
1987 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1988 } IEM_MC_NATIVE_ENDIF(); \
1989 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1990 IEM_MC_END(); \
1991 break; \
1992 \
1993 case IEMMODE_64BIT: \
1994 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1996 IEM_MC_ARG(uint64_t, u64Src, 2); \
1997 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
1998 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
1999 IEM_MC_LOCAL(uint64_t, u64Dst); \
2000 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2001 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2002 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2003 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2004 } IEM_MC_NATIVE_ELSE() { \
2005 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2006 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2007 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2008 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2009 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2010 } IEM_MC_NATIVE_ENDIF(); \
2011 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2012 IEM_MC_END(); \
2013 break; \
2014 \
2015 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2016 } \
2017 } \
2018 else \
2019 { \
2020 /* \
2021 * We're accessing memory. \
2022 */ \
2023 switch (pVCpu->iem.s.enmEffOpSize) \
2024 { \
2025 case IEMMODE_16BIT: \
2026 IEM_MC_BEGIN(0, 0); \
2027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2030 IEM_MC_ARG(uint16_t, u16Src, 2); \
2031 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2032 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2033 IEM_MC_LOCAL(uint16_t, u16Dst); \
2034 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2035 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2036 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
2037 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2038 } IEM_MC_NATIVE_ELSE() { \
2039 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
2040 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2041 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2042 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
2043 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2044 } IEM_MC_NATIVE_ENDIF(); \
2045 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2046 IEM_MC_END(); \
2047 break; \
2048 \
2049 case IEMMODE_32BIT: \
2050 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
2051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2054 IEM_MC_ARG(uint32_t, u32Src, 2); \
2055 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2056 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2057 IEM_MC_LOCAL(uint32_t, u32Dst); \
2058 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2059 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2060 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
2061 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2062 } IEM_MC_NATIVE_ELSE() { \
2063 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
2064 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2065 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2066 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
2067 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2068 } IEM_MC_NATIVE_ENDIF(); \
2069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2070 IEM_MC_END(); \
2071 break; \
2072 \
2073 case IEMMODE_64BIT: \
2074 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
2076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
2077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2078 IEM_MC_ARG(uint64_t, u64Src, 2); \
2079 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
2080 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
2081 IEM_MC_LOCAL(uint64_t, u64Dst); \
2082 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2083 IEM_MC_LOCAL_EFLAGS(uEFlags); \
2084 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
2085 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
2086 } IEM_MC_NATIVE_ELSE() { \
2087 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
2088 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
2089 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2090 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
2091 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2092 } IEM_MC_NATIVE_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2094 IEM_MC_END(); \
2095 break; \
2096 \
2097 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2098 } \
2099 } \
2100 (void)0
2101
2102
2103/**
2104 * @opcode 0x38
2105 * @opflclass arithmetic
2106 */
2107FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
2108{
2109 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
2110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2111 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2112}
2113
2114
2115/**
2116 * @opcode 0x39
2117 * @opflclass arithmetic
2118 */
2119FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
2120{
2121 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
2122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2123 IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2124}
2125
2126
2127/**
2128 * @opcode 0x3a
2129 * @opflclass arithmetic
2130 */
2131FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
2132{
2133 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
2134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2135 IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2136}
2137
2138
2139/**
2140 * @opcode 0x3b
2141 * @opflclass arithmetic
2142 */
2143FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
2144{
2145 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
2146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2147 IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2148}
2149
2150
2151/**
2152 * @opcode 0x3c
2153 * @opflclass arithmetic
2154 */
2155FNIEMOP_DEF(iemOp_cmp_Al_Ib)
2156{
2157 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
2158 IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2159}
2160
2161
2162/**
2163 * @opcode 0x3d
2164 * @opflclass arithmetic
2165 */
2166FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
2167{
2168 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
2169 IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
2170}
2171
2172
2173/**
2174 * @opcode 0x3e
2175 */
2176FNIEMOP_DEF(iemOp_seg_DS)
2177{
2178 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
2179 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
2180 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
2181
2182 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2183 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2184}
2185
2186
2187/**
2188 * @opcode 0x3f
2189 * @opfltest af
2190 * @opflmodify cf,pf,af,zf,sf,of
2191 * @opflundef pf,zf,sf,of
2192 * @opgroup og_gen_arith_dec
2193 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
2194 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
2195 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
2196 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
2197 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
2198 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
2199 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
2200 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
2201 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
2202 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2203 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2204 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
2205 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
2206 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
2207 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
2208 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2209 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2210 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2211 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2212 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
2213 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2214 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2215 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2216 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2217 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2218 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2219 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2220 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2221 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2222 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2223 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2224 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2225 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2226 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2227 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2228 */
2229FNIEMOP_DEF(iemOp_aas)
2230{
2231 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2232 IEMOP_HLP_NO_64BIT();
2233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
2235
2236 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2237}
2238
2239
2240/**
2241 * Common 'inc/dec register' helper.
2242 *
2243 * Not for 64-bit code, only for what became the rex prefixes.
2244 */
2245#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
2246 switch (pVCpu->iem.s.enmEffOpSize) \
2247 { \
2248 case IEMMODE_16BIT: \
2249 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0); \
2250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2251 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
2252 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2253 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
2254 IEM_MC_REF_EFLAGS(pEFlags); \
2255 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
2256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2257 IEM_MC_END(); \
2258 break; \
2259 \
2260 case IEMMODE_32BIT: \
2261 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2263 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
2264 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2265 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
2266 IEM_MC_REF_EFLAGS(pEFlags); \
2267 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
2268 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
2269 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2270 IEM_MC_END(); \
2271 break; \
2272 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2273 } \
2274 (void)0
2275
2276/**
2277 * @opcode 0x40
2278 * @opflclass incdec
2279 */
2280FNIEMOP_DEF(iemOp_inc_eAX)
2281{
2282 /*
2283 * This is a REX prefix in 64-bit mode.
2284 */
2285 if (IEM_IS_64BIT_CODE(pVCpu))
2286 {
2287 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
2288 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
2289
2290 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2291 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2292 }
2293
2294 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
2295 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
2296}
2297
2298
2299/**
2300 * @opcode 0x41
2301 * @opflclass incdec
2302 */
2303FNIEMOP_DEF(iemOp_inc_eCX)
2304{
2305 /*
2306 * This is a REX prefix in 64-bit mode.
2307 */
2308 if (IEM_IS_64BIT_CODE(pVCpu))
2309 {
2310 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
2311 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
2312 pVCpu->iem.s.uRexB = 1 << 3;
2313
2314 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2315 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2316 }
2317
2318 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
2319 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
2320}
2321
2322
2323/**
2324 * @opcode 0x42
2325 * @opflclass incdec
2326 */
2327FNIEMOP_DEF(iemOp_inc_eDX)
2328{
2329 /*
2330 * This is a REX prefix in 64-bit mode.
2331 */
2332 if (IEM_IS_64BIT_CODE(pVCpu))
2333 {
2334 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
2335 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
2336 pVCpu->iem.s.uRexIndex = 1 << 3;
2337
2338 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2339 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2340 }
2341
2342 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
2343 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
2344}
2345
2346
2347
2348/**
2349 * @opcode 0x43
2350 * @opflclass incdec
2351 */
2352FNIEMOP_DEF(iemOp_inc_eBX)
2353{
2354 /*
2355 * This is a REX prefix in 64-bit mode.
2356 */
2357 if (IEM_IS_64BIT_CODE(pVCpu))
2358 {
2359 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
2360 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2361 pVCpu->iem.s.uRexB = 1 << 3;
2362 pVCpu->iem.s.uRexIndex = 1 << 3;
2363
2364 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2365 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2366 }
2367
2368 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
2369 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
2370}
2371
2372
2373/**
2374 * @opcode 0x44
2375 * @opflclass incdec
2376 */
2377FNIEMOP_DEF(iemOp_inc_eSP)
2378{
2379 /*
2380 * This is a REX prefix in 64-bit mode.
2381 */
2382 if (IEM_IS_64BIT_CODE(pVCpu))
2383 {
2384 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
2385 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
2386 pVCpu->iem.s.uRexReg = 1 << 3;
2387
2388 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2389 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2390 }
2391
2392 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
2393 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
2394}
2395
2396
2397/**
2398 * @opcode 0x45
2399 * @opflclass incdec
2400 */
2401FNIEMOP_DEF(iemOp_inc_eBP)
2402{
2403 /*
2404 * This is a REX prefix in 64-bit mode.
2405 */
2406 if (IEM_IS_64BIT_CODE(pVCpu))
2407 {
2408 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
2409 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
2410 pVCpu->iem.s.uRexReg = 1 << 3;
2411 pVCpu->iem.s.uRexB = 1 << 3;
2412
2413 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2414 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2415 }
2416
2417 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
2418 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
2419}
2420
2421
2422/**
2423 * @opcode 0x46
2424 * @opflclass incdec
2425 */
2426FNIEMOP_DEF(iemOp_inc_eSI)
2427{
2428 /*
2429 * This is a REX prefix in 64-bit mode.
2430 */
2431 if (IEM_IS_64BIT_CODE(pVCpu))
2432 {
2433 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
2434 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
2435 pVCpu->iem.s.uRexReg = 1 << 3;
2436 pVCpu->iem.s.uRexIndex = 1 << 3;
2437
2438 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2439 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2440 }
2441
2442 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
2443 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
2444}
2445
2446
2447/**
2448 * @opcode 0x47
2449 * @opflclass incdec
2450 */
2451FNIEMOP_DEF(iemOp_inc_eDI)
2452{
2453 /*
2454 * This is a REX prefix in 64-bit mode.
2455 */
2456 if (IEM_IS_64BIT_CODE(pVCpu))
2457 {
2458 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
2459 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2460 pVCpu->iem.s.uRexReg = 1 << 3;
2461 pVCpu->iem.s.uRexB = 1 << 3;
2462 pVCpu->iem.s.uRexIndex = 1 << 3;
2463
2464 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2465 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2466 }
2467
2468 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
2469 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2470}
2471
2472
2473/**
2474 * @opcode 0x48
2475 * @opflclass incdec
2476 */
2477FNIEMOP_DEF(iemOp_dec_eAX)
2478{
2479 /*
2480 * This is a REX prefix in 64-bit mode.
2481 */
2482 if (IEM_IS_64BIT_CODE(pVCpu))
2483 {
2484 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2485 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2486 iemRecalEffOpSize(pVCpu);
2487
2488 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2489 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2490 }
2491
2492 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2493 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2494}
2495
2496
2497/**
2498 * @opcode 0x49
2499 * @opflclass incdec
2500 */
2501FNIEMOP_DEF(iemOp_dec_eCX)
2502{
2503 /*
2504 * This is a REX prefix in 64-bit mode.
2505 */
2506 if (IEM_IS_64BIT_CODE(pVCpu))
2507 {
2508 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2509 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2510 pVCpu->iem.s.uRexB = 1 << 3;
2511 iemRecalEffOpSize(pVCpu);
2512
2513 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2514 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2515 }
2516
2517 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2518 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2519}
2520
2521
2522/**
2523 * @opcode 0x4a
2524 * @opflclass incdec
2525 */
2526FNIEMOP_DEF(iemOp_dec_eDX)
2527{
2528 /*
2529 * This is a REX prefix in 64-bit mode.
2530 */
2531 if (IEM_IS_64BIT_CODE(pVCpu))
2532 {
2533 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2534 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2535 pVCpu->iem.s.uRexIndex = 1 << 3;
2536 iemRecalEffOpSize(pVCpu);
2537
2538 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2539 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2540 }
2541
2542 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2543 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2544}
2545
2546
2547/**
2548 * @opcode 0x4b
2549 * @opflclass incdec
2550 */
2551FNIEMOP_DEF(iemOp_dec_eBX)
2552{
2553 /*
2554 * This is a REX prefix in 64-bit mode.
2555 */
2556 if (IEM_IS_64BIT_CODE(pVCpu))
2557 {
2558 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2559 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2560 pVCpu->iem.s.uRexB = 1 << 3;
2561 pVCpu->iem.s.uRexIndex = 1 << 3;
2562 iemRecalEffOpSize(pVCpu);
2563
2564 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2565 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2566 }
2567
2568 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2569 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2570}
2571
2572
2573/**
2574 * @opcode 0x4c
2575 * @opflclass incdec
2576 */
2577FNIEMOP_DEF(iemOp_dec_eSP)
2578{
2579 /*
2580 * This is a REX prefix in 64-bit mode.
2581 */
2582 if (IEM_IS_64BIT_CODE(pVCpu))
2583 {
2584 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2585 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2586 pVCpu->iem.s.uRexReg = 1 << 3;
2587 iemRecalEffOpSize(pVCpu);
2588
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2591 }
2592
2593 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2594 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2595}
2596
2597
2598/**
2599 * @opcode 0x4d
2600 * @opflclass incdec
2601 */
2602FNIEMOP_DEF(iemOp_dec_eBP)
2603{
2604 /*
2605 * This is a REX prefix in 64-bit mode.
2606 */
2607 if (IEM_IS_64BIT_CODE(pVCpu))
2608 {
2609 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2610 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2611 pVCpu->iem.s.uRexReg = 1 << 3;
2612 pVCpu->iem.s.uRexB = 1 << 3;
2613 iemRecalEffOpSize(pVCpu);
2614
2615 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2616 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2617 }
2618
2619 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2620 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2621}
2622
2623
2624/**
2625 * @opcode 0x4e
2626 * @opflclass incdec
2627 */
2628FNIEMOP_DEF(iemOp_dec_eSI)
2629{
2630 /*
2631 * This is a REX prefix in 64-bit mode.
2632 */
2633 if (IEM_IS_64BIT_CODE(pVCpu))
2634 {
2635 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2636 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2637 pVCpu->iem.s.uRexReg = 1 << 3;
2638 pVCpu->iem.s.uRexIndex = 1 << 3;
2639 iemRecalEffOpSize(pVCpu);
2640
2641 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2642 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2643 }
2644
2645 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2646 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2647}
2648
2649
2650/**
2651 * @opcode 0x4f
2652 * @opflclass incdec
2653 */
2654FNIEMOP_DEF(iemOp_dec_eDI)
2655{
2656 /*
2657 * This is a REX prefix in 64-bit mode.
2658 */
2659 if (IEM_IS_64BIT_CODE(pVCpu))
2660 {
2661 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2662 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2663 pVCpu->iem.s.uRexReg = 1 << 3;
2664 pVCpu->iem.s.uRexB = 1 << 3;
2665 pVCpu->iem.s.uRexIndex = 1 << 3;
2666 iemRecalEffOpSize(pVCpu);
2667
2668 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2669 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2670 }
2671
2672 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2673 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2674}
2675
2676
2677/**
2678 * Common 'push register' helper.
2679 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply REX.B to the register index and force the 64-bit default
           operand size; only the 66h prefix (-> 16-bit) can override it,
           there is no 32-bit push in 64-bit mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value at the effective operand size and push it. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2724
2725
2726/**
2727 * @opcode 0x50
2728 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2734
2735
2736/**
2737 * @opcode 0x51
2738 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2744
2745
2746/**
2747 * @opcode 0x52
2748 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2754
2755
2756/**
2757 * @opcode 0x53
2758 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2764
2765
2766/**
2767 * @opcode 0x54
2768 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    /* The 8086 pushes the post-decrement SP value (SP - 2), whereas 80186+
       push the value SP had before the instruction. */
    IEM_MC_BEGIN(IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2785
2786
2787/**
2788 * @opcode 0x55
2789 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2795
2796
2797/**
2798 * @opcode 0x56
2799 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2805
2806
2807/**
2808 * @opcode 0x57
2809 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2815
2816
2817/**
2818 * Common 'pop register' helper.
2819 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply REX.B to the register index and force the 64-bit default
           operand size; only the 66h prefix (-> 16-bit) can override it,
           there is no 32-bit pop in 64-bit mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop straight into the register at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2858
2859
2860/**
2861 * @opcode 0x58
2862 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2868
2869
2870/**
2871 * @opcode 0x59
2872 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2878
2879
2880/**
2881 * @opcode 0x5a
2882 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2888
2889
2890/**
2891 * @opcode 0x5b
2892 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2898
2899
2900/**
2901 * @opcode 0x5c
2902 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2908
2909
2910/**
2911 * @opcode 0x5d
2912 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2918
2919
2920/**
2921 * @opcode 0x5e
2922 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2928
2929
2930/**
2931 * @opcode 0x5f
2932 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Common worker applies REX.B and the 64-bit operand size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2938
2939
2940/**
2941 * @opcode 0x60
2942 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* Introduced with the 80186. */
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    /* Defer to the C implementation; only xSP is written by the pushes. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2953
2954
2955/**
2956 * @opcode 0x61
2957 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();  /* Introduced with the 80186. */
        IEMOP_HLP_NO_64BIT();
        /* Defer to the C implementation; all eight GPRs may be written. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 would be the MVEX prefix, which is not supported
       here, so raise #UD. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2992
2993
2994/**
2995 * @opcode 0x62
2996 * @opmnemonic bound
2997 * @op1 Gv_RO
2998 * @op2 Ma
2999 * @opmincpu 80186
3000 * @ophints harmless x86_invalid_64
3001 * @optest op1=0 op2=0 ->
3002 * @optest op1=1 op2=0 -> value.xcpt=5
3003 * @optest o16 / op1=0xffff op2=0x0000fffe ->
3004 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
3005 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
3006 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
3007 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
3008 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
3009 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
3010 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
3011 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
3012 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
3013 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
3014 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
3015 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
3016 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
3017 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
3018 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
3019 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
3020 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
3021 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
3022 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
3023 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
3024 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
3025 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
3026 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
3027 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
3028 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
3029 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
3030 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
3031 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
3032 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
3033 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
3034 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
3035 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
3036 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
3037 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
3038 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
3039 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
3040 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
3041 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
3042 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
3043 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
3044 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
3045 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186(); /* BOUND was introduced with the 80186. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* BOUND proper: fetch the index register and the two bounds from
               memory, then let the C implementation do the range check and
               raise #BR on violation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy mode: either an EVEX prefix (AVX-512 guest) or #UD. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume payload bytes 2 and 3; decoding the prefixed
       instruction itself is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
3133
3134
3135/**
3136 * @opcode 0x63
3137 * @opflmodify zf
3138 * @note non-64-bit modes.
3139 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();              /* ARPL was introduced with the 80286. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();  /* Protected mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        /* Destination selector in a GPR; the assembly helper adjusts its RPL
           and returns the updated EFLAGS (ZF). */
        IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t, u16Src, 2);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_ARG(uint16_t *, pu16Dst, 1);
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_arpl, fEFlagsIn, pu16Dst, u16Src);
        IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        /* Destination selector in memory: map it read/write, run the helper,
           then commit the memory update and EFLAGS. */
        IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint16_t *, pu16Dst, 1);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_ARG(uint16_t, u16Src, 2);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_arpl, fEFlagsIn, pu16Dst, u16Src);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3185
3186
3187/**
3188 * @opcode 0x63
3189 *
3190 * @note This is a weird one. It works like a regular move instruction if
3191 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
3192 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source to 64-bit destination)
       is handled; the non-REX.W move-like variants are not implemented. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
3234
3235
3236/**
3237 * @opcode 0x64
3238 * @opmnemonic segfs
3239 * @opmincpu 80386
3240 * @opgroup og_prefixes
3241 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386(); /* FS was introduced with the 80386. */

    /* Record the segment override and restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3253
3254
3255/**
3256 * @opcode 0x65
3257 * @opmnemonic seggs
3258 * @opmincpu 80386
3259 * @opgroup og_prefixes
3260 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386(); /* GS was introduced with the 80386. */

    /* Record the segment override and restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3272
3273
3274/**
3275 * @opcode 0x66
3276 * @opmnemonic opsize
3277 * @openc prefix
3278 * @opmincpu 80386
3279 * @ophints harmless
3280 * @opgroup og_prefixes
3281 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the 66h prefix and recalc the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3298
3299
3300/**
3301 * @opcode 0x67
3302 * @opmnemonic addrsize
3303 * @openc prefix
3304 * @opmincpu 80386
3305 * @ophints harmless
3306 * @opgroup og_prefixes
3307 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the 67h prefix and flip the effective addressing mode relative
       to the default: 16<->32 in legacy modes, 64->32 in 64-bit mode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3325
3326
3327/**
3328 * @opcode 0x68
3329 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();              /* PUSH imm was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Fetch the immediate at the effective operand size and push it; the
       64-bit form sign-extends a 32-bit immediate. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3370
3371
3372/**
3373 * @opcode 0x69
3374 * @opflclass multiply
3375 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186(); /* Three-operand IMUL was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    /* Each operand-size case handles the register and memory source forms;
       the assembly helper computes the product and the resulting EFLAGS. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 follows the ModRM bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 follows the ModRM bytes */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64-bit */
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 2);
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 follows the ModRM bytes */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 2); /* manual sign-extension of the imm32 */
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3532
3533
3534/**
3535 * @opcode 0x6a
3536 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186(); /* PUSH imm8 was introduced with the 80186. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Sign-extend the imm8 to the effective operand size and push it. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3573
3574
3575/**
3576 * @opcode 0x6b
3577 * @opflclass multiply
3578 */
3579FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3580{
3581 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3582 IEMOP_HLP_MIN_186();
3583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3585
3586 switch (pVCpu->iem.s.enmEffOpSize)
3587 {
3588 case IEMMODE_16BIT:
3589 {
3590 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3591 if (IEM_IS_MODRM_REG_MODE(bRm))
3592 {
3593 /* register operand */
3594 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3595 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3597
3598 IEM_MC_LOCAL(uint16_t, u16Tmp);
3599 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3600
3601 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3602 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3603 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 2);
3604 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3605 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3606 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3607
3608 IEM_MC_ADVANCE_RIP_AND_FINISH();
3609 IEM_MC_END();
3610 }
3611 else
3612 {
3613 /* memory operand */
3614 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3615
3616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3618
3619 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3621
3622 IEM_MC_LOCAL(uint16_t, u16Tmp);
3623 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3624
3625 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 1);
3626 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3627 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 2);
3628 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU16, fEFlagsIn, pu16Dst, u16Src);
3629 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3630 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635 break;
3636 }
3637
3638 case IEMMODE_32BIT:
3639 {
3640 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3641 if (IEM_IS_MODRM_REG_MODE(bRm))
3642 {
3643 /* register operand */
3644 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3645 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3647 IEM_MC_LOCAL(uint32_t, u32Tmp);
3648 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3649
3650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3651 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3652 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 2);
3653 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3654 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3655 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3656
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 /* memory operand */
3663 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3666
3667 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 IEM_MC_LOCAL(uint32_t, u32Tmp);
3671 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3672
3673 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 1);
3674 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3675 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 2);
3676 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU32, fEFlagsIn, pu32Dst, u32Src);
3677 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3678 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3679
3680 IEM_MC_ADVANCE_RIP_AND_FINISH();
3681 IEM_MC_END();
3682 }
3683 break;
3684 }
3685
3686 case IEMMODE_64BIT:
3687 {
3688 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3689 if (IEM_IS_MODRM_REG_MODE(bRm))
3690 {
3691 /* register operand */
3692 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3693 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_LOCAL(uint64_t, u64Tmp);
3696 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3697
3698 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3699 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3700 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3701 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3702 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3703 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3704
3705 IEM_MC_ADVANCE_RIP_AND_FINISH();
3706 IEM_MC_END();
3707 }
3708 else
3709 {
3710 /* memory operand */
3711 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3714
3715 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_LOCAL(uint64_t, u64Tmp);
3719 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3720
3721 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 1);
3722 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
3723 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 2);
3724 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pfnAImplU64, fEFlagsIn, pu64Dst, u64Src);
3725 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3726 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
3727
3728 IEM_MC_ADVANCE_RIP_AND_FINISH();
3729 IEM_MC_END();
3730 }
3731 break;
3732 }
3733
3734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3735 }
3736}
3737
3738
/**
 * @opcode 0x6c
 * @opfltest iopl,df
 *
 * INS Yb,DX - input byte(s) from I/O port DX into ES:[r/e]DI.  The whole
 * operation is deferred to a C implementation selected by the effective
 * address size; the REP/REPNE forms additionally consume [r/e]CX.  All
 * variants may cause VM-exits and perform raw port I/O.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNE is treated exactly like REP for INS (no #UD, no semantic difference). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3791
3792
/**
 * @opcode 0x6d
 * @opfltest iopl,df
 *
 * INS Yv,DX - input word/dword string from I/O port DX.  Dispatches on both
 * the effective operand size and the effective address size to the matching
 * C implementation.  A 64-bit operand size is folded onto the 32-bit
 * implementation since INS has no 64-bit operand form.  REP/REPNE variants
 * also consume [r/e]CX.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNE is treated exactly like REP for INS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit implementation. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit implementation. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3898
3899
/**
 * @opcode 0x6e
 * @opfltest iopl,df
 *
 * OUTS DX,Yb - output byte(s) from seg:[r/e]SI to I/O port DX.  Unlike INS,
 * the source segment is overridable, so the effective segment index is
 * passed to the C implementation.  REP/REPNE forms additionally consume
 * [r/e]CX.  All variants may cause VM-exits and perform raw port I/O.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNE is treated exactly like REP for OUTS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3952
3953
/**
 * @opcode 0x6f
 * @opfltest iopl,df
 *
 * OUTS DX,Yv - output word/dword string from seg:[r/e]SI to I/O port DX.
 * Dispatches on both the effective operand size and the effective address
 * size; a 64-bit operand size is folded onto the 32-bit implementation as
 * OUTS has no 64-bit operand form.  The effective (overridable) source
 * segment is passed along.  REP/REPNE variants also consume [r/e]CX.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNE is treated exactly like REP for OUTS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit implementation. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit implementation. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4059
4060
/**
 * @opcode 0x70
 * @opfltest of
 *
 * JO rel8 - jump short if the overflow flag (OF) is set, else fall through.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4080
4081
/**
 * @opcode 0x71
 * @opfltest of
 *
 * JNO rel8 - jump short if the overflow flag (OF) is clear.  Note the
 * inverted branch arrangement: the flag-set path falls through.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4101
/**
 * @opcode 0x72
 * @opfltest cf
 *
 * JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4121
4122
/**
 * @opcode 0x73
 * @opfltest cf
 *
 * JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear (inverted
 * branch arrangement: the flag-set path falls through).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4142
4143
/**
 * @opcode 0x74
 * @opfltest zf
 *
 * JE/JZ rel8 - jump short if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4163
4164
/**
 * @opcode 0x75
 * @opfltest zf
 *
 * JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear (inverted branch
 * arrangement: the flag-set path falls through).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4184
4185
/**
 * @opcode 0x76
 * @opfltest cf,zf
 *
 * JBE/JNA rel8 - jump short if below or equal, i.e. if CF or ZF is set.
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4205
4206
/**
 * @opcode 0x77
 * @opfltest cf,zf
 *
 * JA/JNBE rel8 - jump short if above, i.e. if both CF and ZF are clear
 * (inverted branch arrangement: the any-flag-set path falls through).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4226
4227
/**
 * @opcode 0x78
 * @opfltest sf
 *
 * JS rel8 - jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4247
4248
/**
 * @opcode 0x79
 * @opfltest sf
 *
 * JNS rel8 - jump short if the sign flag (SF) is clear (inverted branch
 * arrangement: the flag-set path falls through).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4268
4269
/**
 * @opcode 0x7a
 * @opfltest pf
 *
 * JP/JPE rel8 - jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4289
4290
/**
 * @opcode 0x7b
 * @opfltest pf
 *
 * JNP/JPO rel8 - jump short if the parity flag (PF) is clear (inverted
 * branch arrangement: the flag-set path falls through).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4310
4311
/**
 * @opcode 0x7c
 * @opfltest sf,of
 *
 * JL/JNGE rel8 - jump short if less (signed), i.e. if SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4331
4332
/**
 * @opcode 0x7d
 * @opfltest sf,of
 *
 * JNL/JGE rel8 - jump short if not less (signed), i.e. if SF == OF
 * (inverted branch arrangement: the SF != OF path falls through).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4352
4353
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 *
 * JLE/JNG rel8 - jump short if less or equal (signed), i.e. if ZF is set
 * or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4373
4374
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 *
 * JG/JNLE rel8 - jump short if greater (signed), i.e. if ZF is clear and
 * SF == OF (inverted branch arrangement: the "less or equal" path falls
 * through).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4394
4395
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Read-modify-write variant: the destination (register or memory) is both
 * read and written.  For the register form a native-emitter fast path is
 * taken on the architectures in @a a_fRegNativeArchs; otherwise the generic
 * assembly helper is called through a byte pointer reference.  For the
 * memory form the destination is mapped RW (or ATOMIC when a LOCK prefix is
 * in effect and not disregarded) and the plain/locked helper variant is
 * selected accordingly.  @a a_fMemNativeArchs is currently unused by the
 * memory path (always 0 at present call sites).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,               u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL_EFLAGS(                uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *,               pu8Dst,                 1); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG_CONST(uint8_t,           u8Src, /*=*/ u8Imm,    2); \
            IEM_MC_ARG_EFLAGS(                  fEFlagsIn,             0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* plain (non-LOCKed) memory access */ \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
            IEM_MC_ARG(uint8_t *,               pu8Dst,                 1); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG_CONST(uint8_t,           u8Src, /*=*/ u8Imm,    2); \
            IEM_MC_ARG_EFLAGS(                  fEFlagsIn,             0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCKed memory access: atomic mapping + the _locked helper variant. */ \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t,               bUnmapInfo); \
            IEM_MC_ARG(uint8_t *,               pu8Dst,                 1); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG_CONST(uint8_t,           u8Src, /*=*/ u8Imm,    2); \
            IEM_MC_ARG_EFLAGS(                  fEFlagsIn,             0); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), fEFlagsIn, pu8Dst, u8Src); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4470
/**
 * Body for group 1 instruction (binary) w/ byte imm operand that only reads
 * its destination (CMP Eb,Ib): the destination is never written, only the
 * flags are updated.  Consequently a LOCK prefix is invalid and raises \#UD.
 * On the architectures in @a a_fNativeArchs a native-emitter fast path is
 * used for both the register and the (read-only) memory form.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,               u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t const *,         pu8Dst,                 1); \
            IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG_EFLAGS(                  fEFlagsIn,             0); \
            IEM_MC_ARG_CONST(uint8_t,           u8Src, /*=*/ u8Imm,    2); \
            IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
            IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint8_t,           u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *,     pu8Dst,                 1); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_EFLAGS(              fEFlagsIn,             0); \
                IEM_MC_ARG_CONST(uint8_t,       u8Src, /*=*/ u8Imm,    2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u8), fEFlagsIn, pu8Dst, u8Src); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Destination is read-only, so a LOCK prefix is invalid -> #UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4531
4532
4533
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Eb,Ib - read-modify-write, LOCKable; native fast path on AMD64/ARM64.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4544
4545
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Eb,Ib - read-modify-write, LOCKable; native fast path on AMD64/ARM64.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4556
4557
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib - add with carry-in; read-modify-write, LOCKable.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4568
4569
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib - subtract with borrow-in; read-modify-write, LOCKable.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4580
4581
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Eb,Ib - read-modify-write, LOCKable; native fast path on AMD64/ARM64.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4592
4593
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Eb,Ib - read-modify-write, LOCKable; native fast path on AMD64/ARM64.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4604
4605
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Eb,Ib - read-modify-write, LOCKable; native fast path on AMD64/ARM64.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4616
4617
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Eb,Ib - read-only variant: only EFLAGS are written, so the RO body is
 * used and a LOCK prefix raises \#UD.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
4628
4629
/**
 * @opcode 0x80
 *
 * Group 1 Eb,Ib dispatcher: the reg field of the ModR/M byte selects which
 * binary operation to decode (/0=add ... /7=cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4649
4650
/**
 * Body for a group 1 binary operator, Ev,Iz read-modify-write form.
 *
 * Decodes the immediate (16-bit, 32-bit, or 32-bit sign-extended to 64-bit,
 * per the effective operand size) and applies the operator to the register or
 * memory destination.  For a memory destination with a LOCK prefix (and lock
 * disregard not in effect) the atomic/locked assembly helper and atomic
 * mapping are used instead.
 *
 * @param   a_InsNm             Instruction name fragment; forms both the
 *                              iemAImpl_<nm>_uXX[_locked] assembly helper
 *                              names and the iemNativeEmit_<nm>_r_i_efl
 *                              native-recompiler emitter names.
 * @param   a_fRegNativeArchs   RT_ARCH_VAL_XXX mask of host architectures on
 *                              which the register variant is recompiled
 *                              natively (IEM_MC_NATIVE_IF).
 * @param   a_fMemNativeArchs   Ditto for the memory variant; not referenced
 *                              by this body at present, the memory paths
 *                              always call the assembly helpers.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(0, 0); \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            1); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,     2); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4906
/**
 * Body for a group 1 binary operator, Ev,Iz read-only version (cmp).
 *
 * Same decoding as IEMOP_BODY_BINARY_Ev_Iz_RW, but the destination is only
 * read (const reference / read-only mapping) and only EFLAGS is updated.
 * A LOCK prefix on the memory form (unless lock disregard is in effect)
 * raises the invalid-lock-prefix exception.
 *
 * @param   a_InsNm         Instruction name fragment, see the RW variant.
 * @param   a_fNativeArchs  RT_ARCH_VAL_XXX mask of host architectures on which
 *                          both register and memory variants are recompiled
 *                          natively.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(0, 0); \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *,pu16Dst,                1); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *,pu32Dst,                1); \
                    IEM_MC_REF_GREG_U32_CONST (pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *,pu64Dst,                1); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,              0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *,    pu16Dst,        1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(              fEFlagsIn,      0); \
                        IEM_MC_ARG_CONST(uint16_t,      u16Src, u16Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *,    pu32Dst,        1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(              fEFlagsIn,      0); \
                        IEM_MC_ARG_CONST(uint32_t,      u32Src, u32Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS( uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *,    pu64Dst,        1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(              fEFlagsIn,      0); \
                        IEM_MC_ARG_CONST(uint64_t,      u64Src, u64Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5093
5094
5095/**
5096 * @opmaps grp1_81
5097 * @opcode /0
5098 * @opflclass arithmetic
5099 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5105
5106
5107/**
5108 * @opmaps grp1_81
5109 * @opcode /1
5110 * @opflclass logical
5111 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5117
5118
5119/**
5120 * @opmaps grp1_81
5121 * @opcode /2
5122 * @opflclass arithmetic_carry
5123 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5129
5130
5131/**
5132 * @opmaps grp1_81
5133 * @opcode /3
5134 * @opflclass arithmetic_carry
5135 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5141
5142
5143/**
5144 * @opmaps grp1_81
5145 * @opcode /4
5146 * @opflclass logical
5147 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5153
5154
5155/**
5156 * @opmaps grp1_81
5157 * @opcode /5
5158 * @opflclass arithmetic
5159 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5165
5166
5167/**
5168 * @opmaps grp1_81
5169 * @opcode /6
5170 * @opflclass logical
5171 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Read-modify-write: register form recompiles natively on AMD64/ARM64
       hosts; memory form always uses the assembly helper (3rd arg unused). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5177
5178
5179/**
5180 * @opmaps grp1_81
5181 * @opcode /7
5182 * @opflclass arithmetic
5183 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* Read-only body: cmp only updates EFLAGS, so both register and memory
       forms can recompile natively on AMD64/ARM64 hosts, and a LOCK prefix
       raises #UD inside the body. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5189
5190
5191/**
5192 * @opcode 0x81
5193 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1: the ModR/M reg field (bits 5:3) selects which binary operator
       is applied to the Ev,Iz operands; dispatch to the per-operator workers. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, 0..7 covered */
    }
}
5210
5211
5212/**
5213 * @opcode 0x82
5214 * @opmnemonic grp1_82
5215 * @opgroup og_groups
5216 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 (group 1 Eb,Ib) that is invalid in
       64-bit mode; after the mode check it reuses the 0x80 decoder. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
5222
5223
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended to the effective operand size at decode
 * time (the casts below).  For the register form it is fetched before the
 * operand-size switch; for the memory forms it is fetched after the effective
 * address calculation (cbImm = 1).  A LOCK prefix on the memory form (unless
 * lock disregard is in effect) selects the atomic mapping and the _locked
 * assembly helpers.
 *
 * @param   a_InsNm             Instruction name fragment; forms both the
 *                              iemAImpl_<nm>_uXX[_locked] helper names and the
 *                              iemNativeEmit_<nm>_r_i_efl emitter names.
 * @param   a_fRegNativeArchs   RT_ARCH_VAL_XXX mask of host architectures on
 *                              which the register variant is recompiled
 *                              natively.
 * @param   a_fMemNativeArchs   Ditto for the memory variant; not referenced
 *                              by this body at present.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,    1); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,    1); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,    1); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,    1); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,     (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,    1); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,     (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,    1); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,     (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,    1); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,     (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), fEFlagsIn, pu16Dst, u16Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,    1); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,     (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), fEFlagsIn, pu32Dst, u32Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,    1); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,  0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,     (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), fEFlagsIn, pu64Dst, u64Src); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
5463
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Ib_RW: decoder/emulation body for
 * a group-1 binary instruction taking Ev,Ib operands (imm8 sign-extended to
 * the effective operand size) that only reads Ev and updates EFLAGS, i.e. CMP.
 *
 * @param   a_InsNm         Instruction name fragment (e.g. cmp) used via
 *                          RT_CONCAT3 to form the iemAImpl_ and
 *                          iemNativeEmit_ worker names.
 * @param   a_fNativeArchs  RT_ARCH_VAL_XXX mask of host architectures with a
 *                          native recompiler emitter for this instruction.
 *
 * @note    Since nothing is written back, there is no locked variant; a LOCK
 *          prefix on the memory form raises an invalid-lock-prefix fault.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t,      u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *,pu16Dst,            1); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t,      u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *,pu32Dst,            1); \
                    IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t,      u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *,pu64Dst,            1); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t,      u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst,           1); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                        IEM_MC_ARG_CONST(uint16_t,  u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u16), fEFlagsIn, pu16Dst, u16Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t,      u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst,           1); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                        IEM_MC_ARG_CONST(uint32_t,  u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u32), fEFlagsIn, pu32Dst, u32Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t,      u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL_EFLAGS(        uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst,           1); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_EFLAGS(          fEFlagsIn,          0); \
                        IEM_MC_ARG_CONST(uint64_t,  u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 2); \
                        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, RT_CONCAT3(iemAImpl_,a_InsNm,_u64), fEFlagsIn, pu64Dst, u64Src); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5637
5638/**
5639 * @opmaps grp1_83
5640 * @opcode /0
5641 * @opflclass arithmetic
5642 */
5643FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5644{
5645 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5646 IEMOP_BODY_BINARY_Ev_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5647}
5648
5649
5650/**
5651 * @opmaps grp1_83
5652 * @opcode /1
5653 * @opflclass logical
5654 */
5655FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5656{
5657 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5658 IEMOP_BODY_BINARY_Ev_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5659}
5660
5661
5662/**
5663 * @opmaps grp1_83
5664 * @opcode /2
5665 * @opflclass arithmetic_carry
5666 */
5667FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5668{
5669 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5670 IEMOP_BODY_BINARY_Ev_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5671}
5672
5673
5674/**
5675 * @opmaps grp1_83
5676 * @opcode /3
5677 * @opflclass arithmetic_carry
5678 */
5679FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5680{
5681 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5682 IEMOP_BODY_BINARY_Ev_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5683}
5684
5685
5686/**
5687 * @opmaps grp1_83
5688 * @opcode /4
5689 * @opflclass logical
5690 */
5691FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5692{
5693 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5694 IEMOP_BODY_BINARY_Ev_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5695}
5696
5697
5698/**
5699 * @opmaps grp1_83
5700 * @opcode /5
5701 * @opflclass arithmetic
5702 */
5703FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5704{
5705 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5706 IEMOP_BODY_BINARY_Ev_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5707}
5708
5709
5710/**
5711 * @opmaps grp1_83
5712 * @opcode /6
5713 * @opflclass logical
5714 */
5715FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5716{
5717 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5718 IEMOP_BODY_BINARY_Ev_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5719}
5720
5721
5722/**
5723 * @opmaps grp1_83
5724 * @opcode /7
5725 * @opflclass arithmetic
5726 */
5727FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5728{
5729 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5730 IEMOP_BODY_BINARY_Ev_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5731}
5732
5733
5734/**
5735 * @opcode 0x83
5736 */
5737FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5738{
5739 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5740 to the 386 even if absent in the intel reference manuals and some
5741 3rd party opcode listings. */
5742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5743 switch (IEM_GET_MODRM_REG_8(bRm))
5744 {
5745 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5746 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5747 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5748 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5749 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5750 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5751 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5752 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5754 }
5755}
5756
5757
5758/**
5759 * @opcode 0x84
5760 * @opflclass logical
5761 */
5762FNIEMOP_DEF(iemOp_test_Eb_Gb)
5763{
5764 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5766
5767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5768
5769 /*
5770 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5771 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5772 */
5773 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5774 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5775 {
5776 IEM_MC_BEGIN(0, 0);
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 IEM_MC_ARG(uint8_t, u8Src, 2);
5779 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5780 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5781 IEM_MC_LOCAL_EFLAGS(uEFlags);
5782 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
5783 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5784 } IEM_MC_NATIVE_ELSE() {
5785 IEM_MC_ARG(uint8_t *, pu8Dst, 1);
5786 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5787 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5788 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u8, fEFlagsIn, pu8Dst, u8Src);
5789 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5790 } IEM_MC_NATIVE_ENDIF();
5791 IEM_MC_ADVANCE_RIP_AND_FINISH();
5792 IEM_MC_END();
5793 }
5794
5795 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5796}
5797
5798
5799/**
5800 * @opcode 0x85
5801 * @opflclass logical
5802 */
5803FNIEMOP_DEF(iemOp_test_Ev_Gv)
5804{
5805 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5806 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5807
5808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5809
5810 /*
5811 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5812 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5813 */
5814 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5815 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5816 {
5817 switch (pVCpu->iem.s.enmEffOpSize)
5818 {
5819 case IEMMODE_16BIT:
5820 IEM_MC_BEGIN(0, 0);
5821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5822 IEM_MC_ARG(uint16_t, u16Src, 2);
5823 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5824 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5825 IEM_MC_LOCAL_EFLAGS(uEFlags);
5826 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
5827 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5828 } IEM_MC_NATIVE_ELSE() {
5829 IEM_MC_ARG(uint16_t *, pu16Dst, 1);
5830 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5831 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5832 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u16, fEFlagsIn, pu16Dst, u16Src);
5833 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5834 } IEM_MC_NATIVE_ENDIF();
5835 IEM_MC_ADVANCE_RIP_AND_FINISH();
5836 IEM_MC_END();
5837 break;
5838
5839 case IEMMODE_32BIT:
5840 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_ARG(uint32_t, u32Src, 2);
5843 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5844 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5845 IEM_MC_LOCAL_EFLAGS(uEFlags);
5846 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
5847 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5848 } IEM_MC_NATIVE_ELSE() {
5849 IEM_MC_ARG(uint32_t *, pu32Dst, 1);
5850 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5851 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5852 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u32, fEFlagsIn, pu32Dst, u32Src);
5853 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5854 } IEM_MC_NATIVE_ENDIF();
5855 IEM_MC_ADVANCE_RIP_AND_FINISH();
5856 IEM_MC_END();
5857 break;
5858
5859 case IEMMODE_64BIT:
5860 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5862 IEM_MC_ARG(uint64_t, u64Src, 2);
5863 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5864 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5865 IEM_MC_LOCAL_EFLAGS(uEFlags);
5866 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
5867 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5868 } IEM_MC_NATIVE_ELSE() {
5869 IEM_MC_ARG(uint64_t *, pu64Dst, 1);
5870 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5871 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0);
5872 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_test_u64, fEFlagsIn, pu64Dst, u64Src);
5873 IEM_MC_COMMIT_EFLAGS(fEFlagsRet);
5874 } IEM_MC_NATIVE_ENDIF();
5875 IEM_MC_ADVANCE_RIP_AND_FINISH();
5876 IEM_MC_END();
5877 break;
5878
5879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5880 }
5881 }
5882
5883 IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5884}
5885
5886
5887/**
5888 * @opcode 0x86
5889 */
5890FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5891{
5892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5893 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5894
5895 /*
5896 * If rm is denoting a register, no more instruction bytes.
5897 */
5898 if (IEM_IS_MODRM_REG_MODE(bRm))
5899 {
5900 IEM_MC_BEGIN(0, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_LOCAL(uint8_t, uTmp1);
5903 IEM_MC_LOCAL(uint8_t, uTmp2);
5904
5905 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5906 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5907 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5908 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5909
5910 IEM_MC_ADVANCE_RIP_AND_FINISH();
5911 IEM_MC_END();
5912 }
5913 else
5914 {
5915 /*
5916 * We're accessing memory.
5917 */
5918#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5919 IEM_MC_BEGIN(0, 0); \
5920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5921 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5922 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5923 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5924 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5925 \
5926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5927 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5928 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5929 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5930 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5931 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5932 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5933 \
5934 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5935 IEM_MC_END()
5936
5937 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5938 {
5939 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5940 }
5941 else
5942 {
5943 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5944 }
5945 }
5946}
5947
5948
5949/**
5950 * @opcode 0x87
5951 */
5952FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5953{
5954 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5956
5957 /*
5958 * If rm is denoting a register, no more instruction bytes.
5959 */
5960 if (IEM_IS_MODRM_REG_MODE(bRm))
5961 {
5962 switch (pVCpu->iem.s.enmEffOpSize)
5963 {
5964 case IEMMODE_16BIT:
5965 IEM_MC_BEGIN(0, 0);
5966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5967 IEM_MC_LOCAL(uint16_t, uTmp1);
5968 IEM_MC_LOCAL(uint16_t, uTmp2);
5969
5970 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5971 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5972 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5973 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5974
5975 IEM_MC_ADVANCE_RIP_AND_FINISH();
5976 IEM_MC_END();
5977 break;
5978
5979 case IEMMODE_32BIT:
5980 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 IEM_MC_LOCAL(uint32_t, uTmp1);
5983 IEM_MC_LOCAL(uint32_t, uTmp2);
5984
5985 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5986 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5987 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5988 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5989
5990 IEM_MC_ADVANCE_RIP_AND_FINISH();
5991 IEM_MC_END();
5992 break;
5993
5994 case IEMMODE_64BIT:
5995 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5997 IEM_MC_LOCAL(uint64_t, uTmp1);
5998 IEM_MC_LOCAL(uint64_t, uTmp2);
5999
6000 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
6001 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
6002 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
6003 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
6004
6005 IEM_MC_ADVANCE_RIP_AND_FINISH();
6006 IEM_MC_END();
6007 break;
6008
6009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6010 }
6011 }
6012 else
6013 {
6014 /*
6015 * We're accessing memory.
6016 */
6017#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
6018 do { \
6019 switch (pVCpu->iem.s.enmEffOpSize) \
6020 { \
6021 case IEMMODE_16BIT: \
6022 IEM_MC_BEGIN(0, 0); \
6023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6024 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6025 IEM_MC_LOCAL(uint16_t, uTmpReg); \
6026 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
6027 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
6028 \
6029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6030 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
6031 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6032 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6033 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
6034 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6035 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6036 \
6037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6038 IEM_MC_END(); \
6039 break; \
6040 \
6041 case IEMMODE_32BIT: \
6042 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
6043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6044 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6045 IEM_MC_LOCAL(uint32_t, uTmpReg); \
6046 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
6047 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
6048 \
6049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6050 IEMOP_HLP_DONE_DECODING(); \
6051 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6052 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6053 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
6054 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6055 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6056 \
6057 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6058 IEM_MC_END(); \
6059 break; \
6060 \
6061 case IEMMODE_64BIT: \
6062 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
6063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6064 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
6065 IEM_MC_LOCAL(uint64_t, uTmpReg); \
6066 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
6067 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
6068 \
6069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6070 IEMOP_HLP_DONE_DECODING(); \
6071 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6072 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6073 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
6074 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
6075 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
6076 \
6077 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6078 IEM_MC_END(); \
6079 break; \
6080 \
6081 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6082 } \
6083 } while (0)
6084 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
6085 {
6086 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
6087 }
6088 else
6089 {
6090 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
6091 }
6092 }
6093}
6094
6095
6096/**
6097 * @opcode 0x88
6098 */
6099FNIEMOP_DEF(iemOp_mov_Eb_Gb)
6100{
6101 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
6102
6103 uint8_t bRm;
6104 IEM_OPCODE_GET_NEXT_U8(&bRm);
6105
6106 /*
6107 * If rm is denoting a register, no more instruction bytes.
6108 */
6109 if (IEM_IS_MODRM_REG_MODE(bRm))
6110 {
6111 IEM_MC_BEGIN(0, 0);
6112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6113 IEM_MC_LOCAL(uint8_t, u8Value);
6114 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6115 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
6116 IEM_MC_ADVANCE_RIP_AND_FINISH();
6117 IEM_MC_END();
6118 }
6119 else
6120 {
6121 /*
6122 * We're writing a register to memory.
6123 */
6124 IEM_MC_BEGIN(0, 0);
6125 IEM_MC_LOCAL(uint8_t, u8Value);
6126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6129 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6130 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
6131 IEM_MC_ADVANCE_RIP_AND_FINISH();
6132 IEM_MC_END();
6133 }
6134}
6135
6136
6137/**
6138 * @opcode 0x89
6139 */
6140FNIEMOP_DEF(iemOp_mov_Ev_Gv)
6141{
6142 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
6143
6144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6145
6146 /*
6147 * If rm is denoting a register, no more instruction bytes.
6148 */
6149 if (IEM_IS_MODRM_REG_MODE(bRm))
6150 {
6151 switch (pVCpu->iem.s.enmEffOpSize)
6152 {
6153 case IEMMODE_16BIT:
6154 IEM_MC_BEGIN(0, 0);
6155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6156 IEM_MC_LOCAL(uint16_t, u16Value);
6157 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6158 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6159 IEM_MC_ADVANCE_RIP_AND_FINISH();
6160 IEM_MC_END();
6161 break;
6162
6163 case IEMMODE_32BIT:
6164 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 IEM_MC_LOCAL(uint32_t, u32Value);
6167 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6168 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6169 IEM_MC_ADVANCE_RIP_AND_FINISH();
6170 IEM_MC_END();
6171 break;
6172
6173 case IEMMODE_64BIT:
6174 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6176 IEM_MC_LOCAL(uint64_t, u64Value);
6177 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6178 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6179 IEM_MC_ADVANCE_RIP_AND_FINISH();
6180 IEM_MC_END();
6181 break;
6182
6183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6184 }
6185 }
6186 else
6187 {
6188 /*
6189 * We're writing a register to memory.
6190 */
6191 switch (pVCpu->iem.s.enmEffOpSize)
6192 {
6193 case IEMMODE_16BIT:
6194 IEM_MC_BEGIN(0, 0);
6195 IEM_MC_LOCAL(uint16_t, u16Value);
6196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6199 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6200 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6201 IEM_MC_ADVANCE_RIP_AND_FINISH();
6202 IEM_MC_END();
6203 break;
6204
6205 case IEMMODE_32BIT:
6206 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6207 IEM_MC_LOCAL(uint32_t, u32Value);
6208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6212 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6213 IEM_MC_ADVANCE_RIP_AND_FINISH();
6214 IEM_MC_END();
6215 break;
6216
6217 case IEMMODE_64BIT:
6218 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6219 IEM_MC_LOCAL(uint64_t, u64Value);
6220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6223 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6224 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 break;
6228
6229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6230 }
6231 }
6232}
6233
6234
6235/**
6236 * @opcode 0x8a
6237 */
6238FNIEMOP_DEF(iemOp_mov_Gb_Eb)
6239{
6240 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
6241
6242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6243
6244 /*
6245 * If rm is denoting a register, no more instruction bytes.
6246 */
6247 if (IEM_IS_MODRM_REG_MODE(bRm))
6248 {
6249 IEM_MC_BEGIN(0, 0);
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251 IEM_MC_LOCAL(uint8_t, u8Value);
6252 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6253 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6254 IEM_MC_ADVANCE_RIP_AND_FINISH();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /*
6260 * We're loading a register from memory.
6261 */
6262 IEM_MC_BEGIN(0, 0);
6263 IEM_MC_LOCAL(uint8_t, u8Value);
6264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6267 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6268 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6269 IEM_MC_ADVANCE_RIP_AND_FINISH();
6270 IEM_MC_END();
6271 }
6272}
6273
6274
6275/**
6276 * @opcode 0x8b
6277 */
6278FNIEMOP_DEF(iemOp_mov_Gv_Ev)
6279{
6280 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
6281
6282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6283
6284 /*
6285 * If rm is denoting a register, no more instruction bytes.
6286 */
6287 if (IEM_IS_MODRM_REG_MODE(bRm))
6288 {
6289 switch (pVCpu->iem.s.enmEffOpSize)
6290 {
6291 case IEMMODE_16BIT:
6292 IEM_MC_BEGIN(0, 0);
6293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6294 IEM_MC_LOCAL(uint16_t, u16Value);
6295 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6296 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6297 IEM_MC_ADVANCE_RIP_AND_FINISH();
6298 IEM_MC_END();
6299 break;
6300
6301 case IEMMODE_32BIT:
6302 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6304 IEM_MC_LOCAL(uint32_t, u32Value);
6305 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6306 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6307 IEM_MC_ADVANCE_RIP_AND_FINISH();
6308 IEM_MC_END();
6309 break;
6310
6311 case IEMMODE_64BIT:
6312 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6314 IEM_MC_LOCAL(uint64_t, u64Value);
6315 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6316 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6317 IEM_MC_ADVANCE_RIP_AND_FINISH();
6318 IEM_MC_END();
6319 break;
6320
6321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6322 }
6323 }
6324 else
6325 {
6326 /*
6327 * We're loading a register from memory.
6328 */
6329 switch (pVCpu->iem.s.enmEffOpSize)
6330 {
6331 case IEMMODE_16BIT:
6332 IEM_MC_BEGIN(0, 0);
6333 IEM_MC_LOCAL(uint16_t, u16Value);
6334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6337 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6338 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6339 IEM_MC_ADVANCE_RIP_AND_FINISH();
6340 IEM_MC_END();
6341 break;
6342
6343 case IEMMODE_32BIT:
6344 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6345 IEM_MC_LOCAL(uint32_t, u32Value);
6346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6349 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6350 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6351 IEM_MC_ADVANCE_RIP_AND_FINISH();
6352 IEM_MC_END();
6353 break;
6354
6355 case IEMMODE_64BIT:
6356 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6357 IEM_MC_LOCAL(uint64_t, u64Value);
6358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6361 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6362 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6363 IEM_MC_ADVANCE_RIP_AND_FINISH();
6364 IEM_MC_END();
6365 break;
6366
6367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6368 }
6369 }
6370}
6371
6372
6373/**
6374 * opcode 0x63
6375 * @todo Table fixme
6376 */
6377FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6378{
6379 if (!IEM_IS_64BIT_CODE(pVCpu))
6380 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6381 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6382 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6383 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6384}
6385
6386
6387/**
6388 * @opcode 0x8c
6389 */
6390FNIEMOP_DEF(iemOp_mov_Ev_Sw)
6391{
6392 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
6393
6394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6395
6396 /*
6397 * Check that the destination register exists. The REX.R prefix is ignored.
6398 */
6399 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6400 if (iSegReg > X86_SREG_GS)
6401 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6402
6403 /*
6404 * If rm is denoting a register, no more instruction bytes.
6405 * In that case, the operand size is respected and the upper bits are
6406 * cleared (starting with some pentium).
6407 */
6408 if (IEM_IS_MODRM_REG_MODE(bRm))
6409 {
6410 switch (pVCpu->iem.s.enmEffOpSize)
6411 {
6412 case IEMMODE_16BIT:
6413 IEM_MC_BEGIN(0, 0);
6414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6415 IEM_MC_LOCAL(uint16_t, u16Value);
6416 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6417 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6418 IEM_MC_ADVANCE_RIP_AND_FINISH();
6419 IEM_MC_END();
6420 break;
6421
6422 case IEMMODE_32BIT:
6423 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_LOCAL(uint32_t, u32Value);
6426 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
6427 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 break;
6431
6432 case IEMMODE_64BIT:
6433 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6435 IEM_MC_LOCAL(uint64_t, u64Value);
6436 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
6437 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6438 IEM_MC_ADVANCE_RIP_AND_FINISH();
6439 IEM_MC_END();
6440 break;
6441
6442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6443 }
6444 }
6445 else
6446 {
6447 /*
6448 * We're saving the register to memory. The access is word sized
6449 * regardless of operand size prefixes.
6450 */
6451#if 0 /* not necessary */
6452 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6453#endif
6454 IEM_MC_BEGIN(0, 0);
6455 IEM_MC_LOCAL(uint16_t, u16Value);
6456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6459 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6460 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6461 IEM_MC_ADVANCE_RIP_AND_FINISH();
6462 IEM_MC_END();
6463 }
6464}
6465
6466
6467
6468
6469/**
6470 * @opcode 0x8d
6471 */
6472FNIEMOP_DEF(iemOp_lea_Gv_M)
6473{
6474 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
6475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6476 if (IEM_IS_MODRM_REG_MODE(bRm))
6477 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
6478
6479 switch (pVCpu->iem.s.enmEffOpSize)
6480 {
6481 case IEMMODE_16BIT:
6482 IEM_MC_BEGIN(0, 0);
6483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6487 * operand-size, which is usually the case. It'll save an instruction
6488 * and a register. */
6489 IEM_MC_LOCAL(uint16_t, u16Cast);
6490 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
6491 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
6492 IEM_MC_ADVANCE_RIP_AND_FINISH();
6493 IEM_MC_END();
6494 break;
6495
6496 case IEMMODE_32BIT:
6497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6501 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6502 * operand-size, which is usually the case. It'll save an instruction
6503 * and a register. */
6504 IEM_MC_LOCAL(uint32_t, u32Cast);
6505 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
6506 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
6507 IEM_MC_ADVANCE_RIP_AND_FINISH();
6508 IEM_MC_END();
6509 break;
6510
6511 case IEMMODE_64BIT:
6512 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6516 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
6517 IEM_MC_ADVANCE_RIP_AND_FINISH();
6518 IEM_MC_END();
6519 break;
6520
6521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6522 }
6523}
6524
6525
6526/**
6527 * @opcode 0x8e
6528 */
6529FNIEMOP_DEF(iemOp_mov_Sw_Ev)
6530{
6531 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
6532
6533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6534
6535 /*
6536 * The practical operand size is 16-bit.
6537 */
6538#if 0 /* not necessary */
6539 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6540#endif
6541
6542 /*
6543 * Check that the destination register exists and can be used with this
6544 * instruction. The REX.R prefix is ignored.
6545 */
6546 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6547 /** @todo r=bird: What does 8086 do here wrt CS? */
6548 if ( iSegReg == X86_SREG_CS
6549 || iSegReg > X86_SREG_GS)
6550 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6551
6552 /*
6553 * If rm is denoting a register, no more instruction bytes.
6554 *
6555 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
6556 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
6557 * register. This is a restriction of the current recompiler
6558 * approach.
6559 */
6560 if (IEM_IS_MODRM_REG_MODE(bRm))
6561 {
6562#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
6563 IEM_MC_BEGIN(0, a_fCImplFlags); \
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6565 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6566 IEM_MC_ARG(uint16_t, u16Value, 1); \
6567 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6568 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6569 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6570 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6571 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6572 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6573 iemCImpl_load_SReg, iSRegArg, u16Value); \
6574 IEM_MC_END()
6575
6576 if (iSegReg == X86_SREG_SS)
6577 {
6578 if (IEM_IS_32BIT_CODE(pVCpu))
6579 {
6580 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6581 }
6582 else
6583 {
6584 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6585 }
6586 }
6587 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6588 {
6589 IEMOP_MOV_SW_EV_REG_BODY(0);
6590 }
6591 else
6592 {
6593 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
6594 }
6595#undef IEMOP_MOV_SW_EV_REG_BODY
6596 }
6597 else
6598 {
6599 /*
6600 * We're loading the register from memory. The access is word sized
6601 * regardless of operand size prefixes.
6602 */
6603#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
6604 IEM_MC_BEGIN(0, a_fCImplFlags); \
6605 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6606 IEM_MC_ARG(uint16_t, u16Value, 1); \
6607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6610 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6611 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6612 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6613 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6614 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6615 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6616 iemCImpl_load_SReg, iSRegArg, u16Value); \
6617 IEM_MC_END()
6618
6619 if (iSegReg == X86_SREG_SS)
6620 {
6621 if (IEM_IS_32BIT_CODE(pVCpu))
6622 {
6623 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6624 }
6625 else
6626 {
6627 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6628 }
6629 }
6630 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6631 {
6632 IEMOP_MOV_SW_EV_MEM_BODY(0);
6633 }
6634 else
6635 {
6636 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
6637 }
6638#undef IEMOP_MOV_SW_EV_MEM_BODY
6639 }
6640}
6641
6642
/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into a GPR or memory.
 * Called from iemOp_Grp1A__xop once modrm.reg == 0 has been established. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            /* (operand size << 8) tells the EA calc to pre-adjust rSP. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6774
6775
6776/**
6777 * @opcode 0x8f
6778 */
6779FNIEMOP_DEF(iemOp_Grp1A__xop)
6780{
6781 /*
6782 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6783 * three byte VEX prefix, except that the mmmmm field cannot have the values
6784 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6785 */
6786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6787 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6788 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6789
6790 IEMOP_MNEMONIC(xop, "xop");
6791 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6792 {
6793 /** @todo Test when exctly the XOP conformance checks kick in during
6794 * instruction decoding and fetching (using \#PF). */
6795 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6796 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6797 if ( ( pVCpu->iem.s.fPrefixes
6798 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6799 == 0)
6800 {
6801 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6802 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6803 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6804 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6805 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6806 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6807 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6808 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6809 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6810
6811 /** @todo XOP: Just use new tables and decoders. */
6812 switch (bRm & 0x1f)
6813 {
6814 case 8: /* xop opcode map 8. */
6815 IEMOP_BITCH_ABOUT_STUB();
6816 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6817
6818 case 9: /* xop opcode map 9. */
6819 IEMOP_BITCH_ABOUT_STUB();
6820 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6821
6822 case 10: /* xop opcode map 10. */
6823 IEMOP_BITCH_ABOUT_STUB();
6824 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6825
6826 default:
6827 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6828 IEMOP_RAISE_INVALID_OPCODE_RET();
6829 }
6830 }
6831 else
6832 Log(("XOP: Invalid prefix mix!\n"));
6833 }
6834 else
6835 Log(("XOP: XOP support disabled!\n"));
6836 IEMOP_RAISE_INVALID_OPCODE_RET();
6837}
6838
6839
6840/**
6841 * Common 'xchg reg,rAX' helper.
6842 */
6843FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6844{
6845 iReg |= pVCpu->iem.s.uRexB;
6846 switch (pVCpu->iem.s.enmEffOpSize)
6847 {
6848 case IEMMODE_16BIT:
6849 IEM_MC_BEGIN(0, 0);
6850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6851 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6852 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6853 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6854 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6855 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6856 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6857 IEM_MC_ADVANCE_RIP_AND_FINISH();
6858 IEM_MC_END();
6859 break;
6860
6861 case IEMMODE_32BIT:
6862 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6865 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6866 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6867 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6868 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6869 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6870 IEM_MC_ADVANCE_RIP_AND_FINISH();
6871 IEM_MC_END();
6872 break;
6873
6874 case IEMMODE_64BIT:
6875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6877 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6878 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6879 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6880 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6881 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6882 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6883 IEM_MC_ADVANCE_RIP_AND_FINISH();
6884 IEM_MC_END();
6885 break;
6886
6887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6888 }
6889}
6890
6891
6892/**
6893 * @opcode 0x90
6894 */
6895FNIEMOP_DEF(iemOp_nop)
6896{
6897 /* R8/R8D and RAX/EAX can be exchanged. */
6898 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6899 {
6900 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6901 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6902 }
6903
6904 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6905 {
6906 IEMOP_MNEMONIC(pause, "pause");
6907 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6908 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6909 if (!IEM_IS_IN_GUEST(pVCpu))
6910 { /* probable */ }
6911#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6912 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6913 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6914#endif
6915#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6916 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6917 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6918#endif
6919 }
6920 else
6921 IEMOP_MNEMONIC(nop, "nop");
6922 /** @todo testcase: lock nop; lock pause */
6923 IEM_MC_BEGIN(0, 0);
6924 IEMOP_HLP_DONE_DECODING();
6925 IEM_MC_ADVANCE_RIP_AND_FINISH();
6926 IEM_MC_END();
6927}
6928
6929
6930/**
6931 * @opcode 0x91
6932 */
6933FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6934{
6935 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6936 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6937}
6938
6939
6940/**
6941 * @opcode 0x92
6942 */
6943FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6944{
6945 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6946 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6947}
6948
6949
6950/**
6951 * @opcode 0x93
6952 */
6953FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6954{
6955 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6956 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6957}
6958
6959
6960/**
6961 * @opcode 0x94
6962 */
6963FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6964{
6965 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6966 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6967}
6968
6969
6970/**
6971 * @opcode 0x95
6972 */
6973FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6974{
6975 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6976 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6977}
6978
6979
6980/**
6981 * @opcode 0x96
6982 */
6983FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6984{
6985 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6986 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6987}
6988
6989
6990/**
6991 * @opcode 0x97
6992 */
6993FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6994{
6995 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6996 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6997}
6998
6999
7000/**
7001 * @opcode 0x98
7002 */
7003FNIEMOP_DEF(iemOp_cbw)
7004{
7005 switch (pVCpu->iem.s.enmEffOpSize)
7006 {
7007 case IEMMODE_16BIT:
7008 IEMOP_MNEMONIC(cbw, "cbw");
7009 IEM_MC_BEGIN(0, 0);
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
7012 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
7013 } IEM_MC_ELSE() {
7014 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
7015 } IEM_MC_ENDIF();
7016 IEM_MC_ADVANCE_RIP_AND_FINISH();
7017 IEM_MC_END();
7018 break;
7019
7020 case IEMMODE_32BIT:
7021 IEMOP_MNEMONIC(cwde, "cwde");
7022 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7024 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
7025 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
7026 } IEM_MC_ELSE() {
7027 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
7028 } IEM_MC_ENDIF();
7029 IEM_MC_ADVANCE_RIP_AND_FINISH();
7030 IEM_MC_END();
7031 break;
7032
7033 case IEMMODE_64BIT:
7034 IEMOP_MNEMONIC(cdqe, "cdqe");
7035 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7037 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
7038 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
7039 } IEM_MC_ELSE() {
7040 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
7041 } IEM_MC_ENDIF();
7042 IEM_MC_ADVANCE_RIP_AND_FINISH();
7043 IEM_MC_END();
7044 break;
7045
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7047 }
7048}
7049
7050
7051/**
7052 * @opcode 0x99
7053 */
7054FNIEMOP_DEF(iemOp_cwd)
7055{
7056 switch (pVCpu->iem.s.enmEffOpSize)
7057 {
7058 case IEMMODE_16BIT:
7059 IEMOP_MNEMONIC(cwd, "cwd");
7060 IEM_MC_BEGIN(0, 0);
7061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7062 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
7063 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
7064 } IEM_MC_ELSE() {
7065 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
7066 } IEM_MC_ENDIF();
7067 IEM_MC_ADVANCE_RIP_AND_FINISH();
7068 IEM_MC_END();
7069 break;
7070
7071 case IEMMODE_32BIT:
7072 IEMOP_MNEMONIC(cdq, "cdq");
7073 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7075 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
7076 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
7077 } IEM_MC_ELSE() {
7078 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
7079 } IEM_MC_ENDIF();
7080 IEM_MC_ADVANCE_RIP_AND_FINISH();
7081 IEM_MC_END();
7082 break;
7083
7084 case IEMMODE_64BIT:
7085 IEMOP_MNEMONIC(cqo, "cqo");
7086 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7088 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
7089 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
7090 } IEM_MC_ELSE() {
7091 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
7092 } IEM_MC_ENDIF();
7093 IEM_MC_ADVANCE_RIP_AND_FINISH();
7094 IEM_MC_END();
7095 break;
7096
7097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7098 }
7099}
7100
7101
7102/**
7103 * @opcode 0x9a
7104 */
7105FNIEMOP_DEF(iemOp_call_Ap)
7106{
7107 IEMOP_MNEMONIC(call_Ap, "call Ap");
7108 IEMOP_HLP_NO_64BIT();
7109
7110 /* Decode the far pointer address and pass it on to the far call C implementation. */
7111 uint32_t off32Seg;
7112 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
7113 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
7114 else
7115 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
7116 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
7119 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
7120 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
7121 /** @todo make task-switches, ring-switches, ++ return non-zero status */
7122}
7123
7124
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * raises them; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7136
7137
7138/**
7139 * @opcode 0x9c
7140 */
7141FNIEMOP_DEF(iemOp_pushf_Fv)
7142{
7143 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
7144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7146 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
7147 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
7148}
7149
7150
7151/**
7152 * @opcode 0x9d
7153 */
7154FNIEMOP_DEF(iemOp_popf_Fv)
7155{
7156 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
7157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7158 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7159 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
7160 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
7161 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
7162}
7163
7164
7165/**
7166 * @opcode 0x9e
7167 * @opflmodify cf,pf,af,zf,sf
7168 */
7169FNIEMOP_DEF(iemOp_sahf)
7170{
7171 IEMOP_MNEMONIC(sahf, "sahf");
7172 if ( IEM_IS_64BIT_CODE(pVCpu)
7173 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7174 IEMOP_RAISE_INVALID_OPCODE_RET();
7175 IEM_MC_BEGIN(0, 0);
7176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7177 IEM_MC_LOCAL(uint32_t, u32Flags);
7178 IEM_MC_LOCAL(uint32_t, EFlags);
7179 IEM_MC_FETCH_EFLAGS(EFlags);
7180 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
7181 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7182 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
7183 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
7184 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
7185 IEM_MC_COMMIT_EFLAGS(EFlags);
7186 IEM_MC_ADVANCE_RIP_AND_FINISH();
7187 IEM_MC_END();
7188}
7189
7190
7191/**
7192 * @opcode 0x9f
7193 * @opfltest cf,pf,af,zf,sf
7194 */
7195FNIEMOP_DEF(iemOp_lahf)
7196{
7197 IEMOP_MNEMONIC(lahf, "lahf");
7198 if ( IEM_IS_64BIT_CODE(pVCpu)
7199 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7200 IEMOP_RAISE_INVALID_OPCODE_RET();
7201 IEM_MC_BEGIN(0, 0);
7202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7203 IEM_MC_LOCAL(uint8_t, u8Flags);
7204 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
7205 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
7206 IEM_MC_ADVANCE_RIP_AND_FINISH();
7207 IEM_MC_END();
7208}
7209
7210
7211/**
7212 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
7213 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
7214 * Will return/throw on failures.
7215 * @param a_GCPtrMemOff The variable to store the offset in.
7216 */
7217#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
7218 do \
7219 { \
7220 switch (pVCpu->iem.s.enmEffAddrMode) \
7221 { \
7222 case IEMMODE_16BIT: \
7223 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
7224 break; \
7225 case IEMMODE_32BIT: \
7226 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
7227 break; \
7228 case IEMMODE_64BIT: \
7229 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
7230 break; \
7231 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
7232 } \
7233 } while (0)
7234
7235/**
7236 * @opcode 0xa0
7237 */
7238FNIEMOP_DEF(iemOp_mov_AL_Ob)
7239{
7240 /*
7241 * Get the offset.
7242 */
7243 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
7244 RTGCPTR GCPtrMemOffDecode;
7245 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7246
7247 /*
7248 * Fetch AL.
7249 */
7250 IEM_MC_BEGIN(0, 0);
7251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7252 IEM_MC_LOCAL(uint8_t, u8Tmp);
7253 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7254 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7255 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7256 IEM_MC_ADVANCE_RIP_AND_FINISH();
7257 IEM_MC_END();
7258}
7259
7260
7261/**
7262 * @opcode 0xa1
7263 */
7264FNIEMOP_DEF(iemOp_mov_rAX_Ov)
7265{
7266 /*
7267 * Get the offset.
7268 */
7269 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
7270 RTGCPTR GCPtrMemOffDecode;
7271 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7272
7273 /*
7274 * Fetch rAX.
7275 */
7276 switch (pVCpu->iem.s.enmEffOpSize)
7277 {
7278 case IEMMODE_16BIT:
7279 IEM_MC_BEGIN(0, 0);
7280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7281 IEM_MC_LOCAL(uint16_t, u16Tmp);
7282 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7283 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7284 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
7285 IEM_MC_ADVANCE_RIP_AND_FINISH();
7286 IEM_MC_END();
7287 break;
7288
7289 case IEMMODE_32BIT:
7290 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7292 IEM_MC_LOCAL(uint32_t, u32Tmp);
7293 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7294 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7295 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
7296 IEM_MC_ADVANCE_RIP_AND_FINISH();
7297 IEM_MC_END();
7298 break;
7299
7300 case IEMMODE_64BIT:
7301 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7303 IEM_MC_LOCAL(uint64_t, u64Tmp);
7304 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7305 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7306 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
7307 IEM_MC_ADVANCE_RIP_AND_FINISH();
7308 IEM_MC_END();
7309 break;
7310
7311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7312 }
7313}
7314
7315
7316/**
7317 * @opcode 0xa2
7318 */
7319FNIEMOP_DEF(iemOp_mov_Ob_AL)
7320{
7321 /*
7322 * Get the offset.
7323 */
7324 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
7325 RTGCPTR GCPtrMemOffDecode;
7326 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7327
7328 /*
7329 * Store AL.
7330 */
7331 IEM_MC_BEGIN(0, 0);
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 IEM_MC_LOCAL(uint8_t, u8Tmp);
7334 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
7335 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7336 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
7337 IEM_MC_ADVANCE_RIP_AND_FINISH();
7338 IEM_MC_END();
7339}
7340
7341
7342/**
7343 * @opcode 0xa3
7344 */
7345FNIEMOP_DEF(iemOp_mov_Ov_rAX)
7346{
7347 /*
7348 * Get the offset.
7349 */
7350 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
7351 RTGCPTR GCPtrMemOffDecode;
7352 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7353
7354 /*
7355 * Store rAX.
7356 */
7357 switch (pVCpu->iem.s.enmEffOpSize)
7358 {
7359 case IEMMODE_16BIT:
7360 IEM_MC_BEGIN(0, 0);
7361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7362 IEM_MC_LOCAL(uint16_t, u16Tmp);
7363 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
7364 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7365 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
7366 IEM_MC_ADVANCE_RIP_AND_FINISH();
7367 IEM_MC_END();
7368 break;
7369
7370 case IEMMODE_32BIT:
7371 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7373 IEM_MC_LOCAL(uint32_t, u32Tmp);
7374 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
7375 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7376 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
7377 IEM_MC_ADVANCE_RIP_AND_FINISH();
7378 IEM_MC_END();
7379 break;
7380
7381 case IEMMODE_64BIT:
7382 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7384 IEM_MC_LOCAL(uint64_t, u64Tmp);
7385 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
7386 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7387 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
7388 IEM_MC_ADVANCE_RIP_AND_FINISH();
7389 IEM_MC_END();
7390 break;
7391
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394}
7395
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-REP movs iteration: load from DS(or override):rSI, store to
 * ES:rDI, then step both index registers by the element size, direction
 * given by EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7415
7416/**
7417 * @opcode 0xa4
7418 * @opfltest df
7419 */
7420FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7421{
7422 /*
7423 * Use the C implementation if a repeat prefix is encountered.
7424 */
7425 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7426 {
7427 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7429 switch (pVCpu->iem.s.enmEffAddrMode)
7430 {
7431 case IEMMODE_16BIT:
7432 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7435 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7436 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7437 case IEMMODE_32BIT:
7438 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7439 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7440 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7441 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7442 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7443 case IEMMODE_64BIT:
7444 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7445 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7446 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7448 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7450 }
7451 }
7452
7453 /*
7454 * Sharing case implementation with movs[wdq] below.
7455 */
7456 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7457 switch (pVCpu->iem.s.enmEffAddrMode)
7458 {
7459 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7460 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7461 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7463 }
7464}
7465
7466
7467/**
7468 * @opcode 0xa5
7469 * @opfltest df
7470 */
7471FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7472{
7473
7474 /*
7475 * Use the C implementation if a repeat prefix is encountered.
7476 */
7477 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7478 {
7479 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7481 switch (pVCpu->iem.s.enmEffOpSize)
7482 {
7483 case IEMMODE_16BIT:
7484 switch (pVCpu->iem.s.enmEffAddrMode)
7485 {
7486 case IEMMODE_16BIT:
7487 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7488 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7489 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7491 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7492 case IEMMODE_32BIT:
7493 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7494 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7497 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7498 case IEMMODE_64BIT:
7499 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7503 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7505 }
7506 break;
7507 case IEMMODE_32BIT:
7508 switch (pVCpu->iem.s.enmEffAddrMode)
7509 {
7510 case IEMMODE_16BIT:
7511 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7512 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7513 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7515 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7516 case IEMMODE_32BIT:
7517 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7518 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7519 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7520 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7521 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7522 case IEMMODE_64BIT:
7523 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7524 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7525 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7526 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7527 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7529 }
7530 case IEMMODE_64BIT:
7531 switch (pVCpu->iem.s.enmEffAddrMode)
7532 {
7533 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7534 case IEMMODE_32BIT:
7535 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7539 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7540 case IEMMODE_64BIT:
7541 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7542 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7543 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7545 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7547 }
7548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7549 }
7550 }
7551
7552 /*
7553 * Annoying double switch here.
7554 * Using ugly macro for implementing the cases, sharing it with movsb.
7555 */
7556 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7557 switch (pVCpu->iem.s.enmEffOpSize)
7558 {
7559 case IEMMODE_16BIT:
7560 switch (pVCpu->iem.s.enmEffAddrMode)
7561 {
7562 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7563 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7564 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7566 }
7567 break;
7568
7569 case IEMMODE_32BIT:
7570 switch (pVCpu->iem.s.enmEffAddrMode)
7571 {
7572 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7573 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7574 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7576 }
7577 break;
7578
7579 case IEMMODE_64BIT:
7580 switch (pVCpu->iem.s.enmEffAddrMode)
7581 {
7582 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7583 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7584 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7586 }
7587 break;
7588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7589 }
7590}
7591
7592#undef IEM_MOVS_CASE
7593
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Implements a single non-repeating CMPS step: fetches the first operand
 * from iEffSeg:[xSI] and the second from ES:[xDI], runs the CMP assembly
 * helper (result discarded, EFLAGS committed), then steps both index
 * registers by ValBits/8 - down when EFLAGS.DF is set, up otherwise.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 2); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 1); \
    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puValue1, uValue2); \
    \
    IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7623
7624/**
7625 * @opcode 0xa6
7626 * @opflclass arithmetic
7627 * @opfltest df
7628 */
7629FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7630{
7631
7632 /*
7633 * Use the C implementation if a repeat prefix is encountered.
7634 */
7635 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7636 {
7637 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7639 switch (pVCpu->iem.s.enmEffAddrMode)
7640 {
7641 case IEMMODE_16BIT:
7642 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7643 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7644 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7645 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7646 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7647 case IEMMODE_32BIT:
7648 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7651 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7652 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7653 case IEMMODE_64BIT:
7654 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7655 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7656 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7657 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7658 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7660 }
7661 }
7662 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7663 {
7664 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7666 switch (pVCpu->iem.s.enmEffAddrMode)
7667 {
7668 case IEMMODE_16BIT:
7669 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7670 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7671 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7673 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7674 case IEMMODE_32BIT:
7675 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7676 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7677 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7679 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7680 case IEMMODE_64BIT:
7681 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7682 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7683 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7685 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7687 }
7688 }
7689
7690 /*
7691 * Sharing case implementation with cmps[wdq] below.
7692 */
7693 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7694 switch (pVCpu->iem.s.enmEffAddrMode)
7695 {
7696 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7697 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7698 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7700 }
7701}
7702
7703
7704/**
7705 * @opcode 0xa7
7706 * @opflclass arithmetic
7707 * @opfltest df
7708 */
7709FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7710{
7711 /*
7712 * Use the C implementation if a repeat prefix is encountered.
7713 */
7714 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7715 {
7716 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 switch (pVCpu->iem.s.enmEffOpSize)
7719 {
7720 case IEMMODE_16BIT:
7721 switch (pVCpu->iem.s.enmEffAddrMode)
7722 {
7723 case IEMMODE_16BIT:
7724 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7725 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7726 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7727 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7728 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7729 case IEMMODE_32BIT:
7730 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7731 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7732 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7733 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7734 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7735 case IEMMODE_64BIT:
7736 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7737 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7738 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7739 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7740 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7742 }
7743 break;
7744 case IEMMODE_32BIT:
7745 switch (pVCpu->iem.s.enmEffAddrMode)
7746 {
7747 case IEMMODE_16BIT:
7748 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7749 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7751 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7752 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7753 case IEMMODE_32BIT:
7754 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7755 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7756 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7759 case IEMMODE_64BIT:
7760 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7761 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7762 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7763 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7764 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7766 }
7767 case IEMMODE_64BIT:
7768 switch (pVCpu->iem.s.enmEffAddrMode)
7769 {
7770 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7771 case IEMMODE_32BIT:
7772 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7773 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7775 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7776 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7777 case IEMMODE_64BIT:
7778 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7779 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7780 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7781 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7782 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7784 }
7785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7786 }
7787 }
7788
7789 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7790 {
7791 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793 switch (pVCpu->iem.s.enmEffOpSize)
7794 {
7795 case IEMMODE_16BIT:
7796 switch (pVCpu->iem.s.enmEffAddrMode)
7797 {
7798 case IEMMODE_16BIT:
7799 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7800 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7801 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7803 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7804 case IEMMODE_32BIT:
7805 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7806 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7807 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7809 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7810 case IEMMODE_64BIT:
7811 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7815 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7817 }
7818 break;
7819 case IEMMODE_32BIT:
7820 switch (pVCpu->iem.s.enmEffAddrMode)
7821 {
7822 case IEMMODE_16BIT:
7823 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7824 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7825 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7826 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7827 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7828 case IEMMODE_32BIT:
7829 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7830 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7831 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7834 case IEMMODE_64BIT:
7835 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7838 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7839 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7841 }
7842 case IEMMODE_64BIT:
7843 switch (pVCpu->iem.s.enmEffAddrMode)
7844 {
7845 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7846 case IEMMODE_32BIT:
7847 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7848 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7849 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7850 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7851 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7852 case IEMMODE_64BIT:
7853 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7855 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7857 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 }
7863
7864 /*
7865 * Annoying double switch here.
7866 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7867 */
7868 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7869 switch (pVCpu->iem.s.enmEffOpSize)
7870 {
7871 case IEMMODE_16BIT:
7872 switch (pVCpu->iem.s.enmEffAddrMode)
7873 {
7874 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7875 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7876 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7878 }
7879 break;
7880
7881 case IEMMODE_32BIT:
7882 switch (pVCpu->iem.s.enmEffAddrMode)
7883 {
7884 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7885 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7886 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7888 }
7889 break;
7890
7891 case IEMMODE_64BIT:
7892 switch (pVCpu->iem.s.enmEffAddrMode)
7893 {
7894 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7895 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7896 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7898 }
7899 break;
7900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7901 }
7902}
7903
7904#undef IEM_CMPS_CASE
7905
7906/**
7907 * @opcode 0xa8
7908 * @opflclass logical
7909 */
7910FNIEMOP_DEF(iemOp_test_AL_Ib)
7911{
7912 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7914 IEMOP_BODY_BINARY_AL_Ib(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7915}
7916
7917
7918/**
7919 * @opcode 0xa9
7920 * @opflclass logical
7921 */
7922FNIEMOP_DEF(iemOp_test_eAX_Iz)
7923{
7924 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7926 IEMOP_BODY_BINARY_rAX_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7927}
7928
7929
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Implements a single non-repeating STOS step: stores AL/AX/EAX/RAX to
 * ES:[xDI] and steps xDI by ValBits/8 - down when EFLAGS.DF is set, up
 * otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7946
7947/**
7948 * @opcode 0xaa
7949 */
7950FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7951{
7952 /*
7953 * Use the C implementation if a repeat prefix is encountered.
7954 */
7955 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7956 {
7957 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7959 switch (pVCpu->iem.s.enmEffAddrMode)
7960 {
7961 case IEMMODE_16BIT:
7962 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7963 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7964 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7965 iemCImpl_stos_al_m16);
7966 case IEMMODE_32BIT:
7967 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7968 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7969 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7970 iemCImpl_stos_al_m32);
7971 case IEMMODE_64BIT:
7972 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7975 iemCImpl_stos_al_m64);
7976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7977 }
7978 }
7979
7980 /*
7981 * Sharing case implementation with stos[wdq] below.
7982 */
7983 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7984 switch (pVCpu->iem.s.enmEffAddrMode)
7985 {
7986 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7987 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7988 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7990 }
7991}
7992
7993
7994/**
7995 * @opcode 0xab
7996 */
7997FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7998{
7999 /*
8000 * Use the C implementation if a repeat prefix is encountered.
8001 */
8002 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8003 {
8004 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
8005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8006 switch (pVCpu->iem.s.enmEffOpSize)
8007 {
8008 case IEMMODE_16BIT:
8009 switch (pVCpu->iem.s.enmEffAddrMode)
8010 {
8011 case IEMMODE_16BIT:
8012 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8013 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8014 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8015 iemCImpl_stos_ax_m16);
8016 case IEMMODE_32BIT:
8017 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8018 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8019 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8020 iemCImpl_stos_ax_m32);
8021 case IEMMODE_64BIT:
8022 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8023 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8024 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8025 iemCImpl_stos_ax_m64);
8026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8027 }
8028 break;
8029 case IEMMODE_32BIT:
8030 switch (pVCpu->iem.s.enmEffAddrMode)
8031 {
8032 case IEMMODE_16BIT:
8033 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8036 iemCImpl_stos_eax_m16);
8037 case IEMMODE_32BIT:
8038 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8039 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8041 iemCImpl_stos_eax_m32);
8042 case IEMMODE_64BIT:
8043 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8044 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8045 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8046 iemCImpl_stos_eax_m64);
8047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8048 }
8049 case IEMMODE_64BIT:
8050 switch (pVCpu->iem.s.enmEffAddrMode)
8051 {
8052 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
8053 case IEMMODE_32BIT:
8054 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8055 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8056 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8057 iemCImpl_stos_rax_m32);
8058 case IEMMODE_64BIT:
8059 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
8060 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8061 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8062 iemCImpl_stos_rax_m64);
8063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8064 }
8065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8066 }
8067 }
8068
8069 /*
8070 * Annoying double switch here.
8071 * Using ugly macro for implementing the cases, sharing it with stosb.
8072 */
8073 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
8074 switch (pVCpu->iem.s.enmEffOpSize)
8075 {
8076 case IEMMODE_16BIT:
8077 switch (pVCpu->iem.s.enmEffAddrMode)
8078 {
8079 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8080 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8081 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
8082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8083 }
8084 break;
8085
8086 case IEMMODE_32BIT:
8087 switch (pVCpu->iem.s.enmEffAddrMode)
8088 {
8089 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8090 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8091 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
8092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8093 }
8094 break;
8095
8096 case IEMMODE_64BIT:
8097 switch (pVCpu->iem.s.enmEffAddrMode)
8098 {
8099 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8100 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
8101 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
8102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8103 }
8104 break;
8105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8106 }
8107}
8108
8109#undef IEM_STOS_CASE
8110
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Implements a single non-repeating LODS step: loads AL/AX/EAX/RAX from
 * iEffSeg:[xSI] and steps xSI by ValBits/8 - down when EFLAGS.DF is set,
 * up otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
8127
8128/**
8129 * @opcode 0xac
8130 * @opfltest df
8131 */
8132FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
8133{
8134 /*
8135 * Use the C implementation if a repeat prefix is encountered.
8136 */
8137 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8138 {
8139 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
8140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8141 switch (pVCpu->iem.s.enmEffAddrMode)
8142 {
8143 case IEMMODE_16BIT:
8144 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8145 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8147 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8148 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
8149 case IEMMODE_32BIT:
8150 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8151 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8152 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8154 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
8155 case IEMMODE_64BIT:
8156 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8160 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
8161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8162 }
8163 }
8164
8165 /*
8166 * Sharing case implementation with stos[wdq] below.
8167 */
8168 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
8169 switch (pVCpu->iem.s.enmEffAddrMode)
8170 {
8171 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8172 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8173 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
8174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8175 }
8176}
8177
8178
8179/**
8180 * @opcode 0xad
8181 * @opfltest df
8182 */
8183FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
8184{
8185 /*
8186 * Use the C implementation if a repeat prefix is encountered.
8187 */
8188 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8189 {
8190 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
8191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8192 switch (pVCpu->iem.s.enmEffOpSize)
8193 {
8194 case IEMMODE_16BIT:
8195 switch (pVCpu->iem.s.enmEffAddrMode)
8196 {
8197 case IEMMODE_16BIT:
8198 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8199 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8200 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8201 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8202 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
8203 case IEMMODE_32BIT:
8204 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8205 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8206 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8207 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8208 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
8209 case IEMMODE_64BIT:
8210 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8211 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8212 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8214 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
8215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8216 }
8217 break;
8218 case IEMMODE_32BIT:
8219 switch (pVCpu->iem.s.enmEffAddrMode)
8220 {
8221 case IEMMODE_16BIT:
8222 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8223 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8224 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8225 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8226 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
8227 case IEMMODE_32BIT:
8228 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8229 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8230 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8232 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
8233 case IEMMODE_64BIT:
8234 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8235 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8236 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8237 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8238 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
8239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8240 }
8241 case IEMMODE_64BIT:
8242 switch (pVCpu->iem.s.enmEffAddrMode)
8243 {
8244 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
8245 case IEMMODE_32BIT:
8246 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8247 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8248 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8249 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8250 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
8251 case IEMMODE_64BIT:
8252 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8253 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8254 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8255 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8256 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
8257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8258 }
8259 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8260 }
8261 }
8262
8263 /*
8264 * Annoying double switch here.
8265 * Using ugly macro for implementing the cases, sharing it with lodsb.
8266 */
8267 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
8268 switch (pVCpu->iem.s.enmEffOpSize)
8269 {
8270 case IEMMODE_16BIT:
8271 switch (pVCpu->iem.s.enmEffAddrMode)
8272 {
8273 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8274 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8275 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
8276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8277 }
8278 break;
8279
8280 case IEMMODE_32BIT:
8281 switch (pVCpu->iem.s.enmEffAddrMode)
8282 {
8283 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8284 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8285 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
8286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8287 }
8288 break;
8289
8290 case IEMMODE_64BIT:
8291 switch (pVCpu->iem.s.enmEffAddrMode)
8292 {
8293 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8294 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
8295 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
8296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8297 }
8298 break;
8299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8300 }
8301}
8302
8303#undef IEM_LODS_CASE
8304
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeated scas forms.
 *
 * Compares xAX (width @a ValBits) against the byte/word/dword/qword at
 * ES:xDI (address width @a AddrBits) using iemAImpl_cmp_uN, commits the
 * resulting EFLAGS, and then decrements or increments xDI by ValBits/8
 * depending on EFLAGS.DF. @a a_fMcFlags are passed to IEM_MC_BEGIN. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 2); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 1); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
    IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, iemAImpl_cmp_u##ValBits, fEFlagsIn, puRax, uValue); \
    \
    IEM_MC_COMMIT_EFLAGS(fEFlagsRet);\
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
8328
8329/**
8330 * @opcode 0xae
8331 * @opflclass arithmetic
8332 * @opfltest df
8333 */
8334FNIEMOP_DEF(iemOp_scasb_AL_Xb)
8335{
8336 /*
8337 * Use the C implementation if a repeat prefix is encountered.
8338 */
8339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8340 {
8341 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
8342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8343 switch (pVCpu->iem.s.enmEffAddrMode)
8344 {
8345 case IEMMODE_16BIT:
8346 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8347 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8349 iemCImpl_repe_scas_al_m16);
8350 case IEMMODE_32BIT:
8351 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8352 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8354 iemCImpl_repe_scas_al_m32);
8355 case IEMMODE_64BIT:
8356 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8357 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8358 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8359 iemCImpl_repe_scas_al_m64);
8360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8361 }
8362 }
8363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8364 {
8365 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
8366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8367 switch (pVCpu->iem.s.enmEffAddrMode)
8368 {
8369 case IEMMODE_16BIT:
8370 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8371 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8372 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8373 iemCImpl_repne_scas_al_m16);
8374 case IEMMODE_32BIT:
8375 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8376 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8377 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8378 iemCImpl_repne_scas_al_m32);
8379 case IEMMODE_64BIT:
8380 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8382 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8383 iemCImpl_repne_scas_al_m64);
8384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8385 }
8386 }
8387
8388 /*
8389 * Sharing case implementation with stos[wdq] below.
8390 */
8391 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
8392 switch (pVCpu->iem.s.enmEffAddrMode)
8393 {
8394 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8395 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8396 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
8397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8398 }
8399}
8400
8401
8402/**
8403 * @opcode 0xaf
8404 * @opflclass arithmetic
8405 * @opfltest df
8406 */
8407FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8408{
8409 /*
8410 * Use the C implementation if a repeat prefix is encountered.
8411 */
8412 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8413 {
8414 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 switch (pVCpu->iem.s.enmEffOpSize)
8417 {
8418 case IEMMODE_16BIT:
8419 switch (pVCpu->iem.s.enmEffAddrMode)
8420 {
8421 case IEMMODE_16BIT:
8422 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8423 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8424 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8425 iemCImpl_repe_scas_ax_m16);
8426 case IEMMODE_32BIT:
8427 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8428 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8429 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8430 iemCImpl_repe_scas_ax_m32);
8431 case IEMMODE_64BIT:
8432 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8435 iemCImpl_repe_scas_ax_m64);
8436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8437 }
8438 break;
8439 case IEMMODE_32BIT:
8440 switch (pVCpu->iem.s.enmEffAddrMode)
8441 {
8442 case IEMMODE_16BIT:
8443 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8444 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8445 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8446 iemCImpl_repe_scas_eax_m16);
8447 case IEMMODE_32BIT:
8448 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8449 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8451 iemCImpl_repe_scas_eax_m32);
8452 case IEMMODE_64BIT:
8453 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8456 iemCImpl_repe_scas_eax_m64);
8457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8458 }
8459 case IEMMODE_64BIT:
8460 switch (pVCpu->iem.s.enmEffAddrMode)
8461 {
8462 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
8463 case IEMMODE_32BIT:
8464 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8465 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8466 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8467 iemCImpl_repe_scas_rax_m32);
8468 case IEMMODE_64BIT:
8469 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8470 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8471 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8472 iemCImpl_repe_scas_rax_m64);
8473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8474 }
8475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8476 }
8477 }
8478 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8479 {
8480 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8482 switch (pVCpu->iem.s.enmEffOpSize)
8483 {
8484 case IEMMODE_16BIT:
8485 switch (pVCpu->iem.s.enmEffAddrMode)
8486 {
8487 case IEMMODE_16BIT:
8488 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8491 iemCImpl_repne_scas_ax_m16);
8492 case IEMMODE_32BIT:
8493 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8494 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8496 iemCImpl_repne_scas_ax_m32);
8497 case IEMMODE_64BIT:
8498 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8499 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8500 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8501 iemCImpl_repne_scas_ax_m64);
8502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8503 }
8504 break;
8505 case IEMMODE_32BIT:
8506 switch (pVCpu->iem.s.enmEffAddrMode)
8507 {
8508 case IEMMODE_16BIT:
8509 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8511 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8512 iemCImpl_repne_scas_eax_m16);
8513 case IEMMODE_32BIT:
8514 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8515 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8517 iemCImpl_repne_scas_eax_m32);
8518 case IEMMODE_64BIT:
8519 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8520 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8521 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8522 iemCImpl_repne_scas_eax_m64);
8523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8524 }
8525 case IEMMODE_64BIT:
8526 switch (pVCpu->iem.s.enmEffAddrMode)
8527 {
8528 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8529 case IEMMODE_32BIT:
8530 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8532 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8533 iemCImpl_repne_scas_rax_m32);
8534 case IEMMODE_64BIT:
8535 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8538 iemCImpl_repne_scas_rax_m64);
8539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8540 }
8541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8542 }
8543 }
8544
8545 /*
8546 * Annoying double switch here.
8547 * Using ugly macro for implementing the cases, sharing it with scasb.
8548 */
8549 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8550 switch (pVCpu->iem.s.enmEffOpSize)
8551 {
8552 case IEMMODE_16BIT:
8553 switch (pVCpu->iem.s.enmEffAddrMode)
8554 {
8555 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8556 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8557 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8559 }
8560 break;
8561
8562 case IEMMODE_32BIT:
8563 switch (pVCpu->iem.s.enmEffAddrMode)
8564 {
8565 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8566 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8567 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8569 }
8570 break;
8571
8572 case IEMMODE_64BIT:
8573 switch (pVCpu->iem.s.enmEffAddrMode)
8574 {
8575 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8576 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8577 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8579 }
8580 break;
8581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8582 }
8583}
8584
8585#undef IEM_SCAS_CASE
8586
8587/**
8588 * Common 'mov r8, imm8' helper.
8589 */
8590FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8591{
8592 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8593 IEM_MC_BEGIN(0, 0);
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8596 IEM_MC_ADVANCE_RIP_AND_FINISH();
8597 IEM_MC_END();
8598}
8599
8600
8601/**
8602 * @opcode 0xb0
8603 */
8604FNIEMOP_DEF(iemOp_mov_AL_Ib)
8605{
8606 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8607 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8608}
8609
8610
8611/**
8612 * @opcode 0xb1
8613 */
8614FNIEMOP_DEF(iemOp_CL_Ib)
8615{
8616 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8617 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8618}
8619
8620
8621/**
8622 * @opcode 0xb2
8623 */
8624FNIEMOP_DEF(iemOp_DL_Ib)
8625{
8626 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8627 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8628}
8629
8630
8631/**
8632 * @opcode 0xb3
8633 */
8634FNIEMOP_DEF(iemOp_BL_Ib)
8635{
8636 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8637 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8638}
8639
8640
8641/**
8642 * @opcode 0xb4
8643 */
8644FNIEMOP_DEF(iemOp_mov_AH_Ib)
8645{
8646 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8647 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8648}
8649
8650
8651/**
8652 * @opcode 0xb5
8653 */
8654FNIEMOP_DEF(iemOp_CH_Ib)
8655{
8656 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8657 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8658}
8659
8660
8661/**
8662 * @opcode 0xb6
8663 */
8664FNIEMOP_DEF(iemOp_DH_Ib)
8665{
8666 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8667 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8668}
8669
8670
8671/**
8672 * @opcode 0xb7
8673 */
8674FNIEMOP_DEF(iemOp_BH_Ib)
8675{
8676 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8677 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8678}
8679
8680
8681/**
8682 * Common 'mov regX,immX' helper.
8683 */
8684FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8685{
8686 switch (pVCpu->iem.s.enmEffOpSize)
8687 {
8688 case IEMMODE_16BIT:
8689 IEM_MC_BEGIN(0, 0);
8690 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8692 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8693 IEM_MC_ADVANCE_RIP_AND_FINISH();
8694 IEM_MC_END();
8695 break;
8696
8697 case IEMMODE_32BIT:
8698 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8699 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8702 IEM_MC_ADVANCE_RIP_AND_FINISH();
8703 IEM_MC_END();
8704 break;
8705
8706 case IEMMODE_64BIT:
8707 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
8708 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8710 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8711 IEM_MC_ADVANCE_RIP_AND_FINISH();
8712 IEM_MC_END();
8713 break;
8714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8715 }
8716}
8717
8718
8719/**
8720 * @opcode 0xb8
8721 */
8722FNIEMOP_DEF(iemOp_eAX_Iv)
8723{
8724 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8725 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8726}
8727
8728
8729/**
8730 * @opcode 0xb9
8731 */
8732FNIEMOP_DEF(iemOp_eCX_Iv)
8733{
8734 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8735 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8736}
8737
8738
8739/**
8740 * @opcode 0xba
8741 */
8742FNIEMOP_DEF(iemOp_eDX_Iv)
8743{
8744 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8745 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8746}
8747
8748
8749/**
8750 * @opcode 0xbb
8751 */
8752FNIEMOP_DEF(iemOp_eBX_Iv)
8753{
8754 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8755 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8756}
8757
8758
8759/**
8760 * @opcode 0xbc
8761 */
8762FNIEMOP_DEF(iemOp_eSP_Iv)
8763{
8764 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8765 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8766}
8767
8768
8769/**
8770 * @opcode 0xbd
8771 */
8772FNIEMOP_DEF(iemOp_eBP_Iv)
8773{
8774 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8775 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8776}
8777
8778
8779/**
8780 * @opcode 0xbe
8781 */
8782FNIEMOP_DEF(iemOp_eSI_Iv)
8783{
8784 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8785 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8786}
8787
8788
8789/**
8790 * @opcode 0xbf
8791 */
8792FNIEMOP_DEF(iemOp_eDI_Iv)
8793{
8794 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8795 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8796}
8797
8798
8799/**
8800 * @opcode 0xc0
8801 */
8802FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8803{
8804 IEMOP_HLP_MIN_186();
8805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8806
8807 /* Need to use a body macro here since the EFLAGS behaviour differs between
8808 the shifts, rotates and rotate w/ carry. Sigh. */
8809#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8810 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8811 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8812 { \
8813 /* register */ \
8814 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8815 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8817 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
8818 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8819 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8820 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8821 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
8822 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8823 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8824 IEM_MC_END(); \
8825 } \
8826 else \
8827 { \
8828 /* memory */ \
8829 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8832 \
8833 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8835 \
8836 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8837 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
8838 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8839 \
8840 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8841 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
8842 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
8843 \
8844 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8845 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8847 IEM_MC_END(); \
8848 } (void)0
8849
8850 switch (IEM_GET_MODRM_REG_8(bRm))
8851 {
8852 /**
8853 * @opdone
8854 * @opmaps grp2_c0
8855 * @opcode /0
8856 * @opflclass rotate_count
8857 */
8858 case 0:
8859 {
8860 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8861 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8862 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8863 break;
8864 }
8865 /**
8866 * @opdone
8867 * @opmaps grp2_c0
8868 * @opcode /1
8869 * @opflclass rotate_count
8870 */
8871 case 1:
8872 {
8873 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8875 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8876 break;
8877 }
8878 /**
8879 * @opdone
8880 * @opmaps grp2_c0
8881 * @opcode /2
8882 * @opflclass rotate_carry_count
8883 */
8884 case 2:
8885 {
8886 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8887 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8888 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8889 break;
8890 }
8891 /**
8892 * @opdone
8893 * @opmaps grp2_c0
8894 * @opcode /3
8895 * @opflclass rotate_carry_count
8896 */
8897 case 3:
8898 {
8899 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8901 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8902 break;
8903 }
8904 /**
8905 * @opdone
8906 * @opmaps grp2_c0
8907 * @opcode /4
8908 * @opflclass shift_count
8909 */
8910 case 4:
8911 {
8912 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8914 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8915 break;
8916 }
8917 /**
8918 * @opdone
8919 * @opmaps grp2_c0
8920 * @opcode /5
8921 * @opflclass shift_count
8922 */
8923 case 5:
8924 {
8925 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8927 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8928 break;
8929 }
8930 /**
8931 * @opdone
8932 * @opmaps grp2_c0
8933 * @opcode /7
8934 * @opflclass shift_count
8935 */
8936 case 7:
8937 {
8938 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8940 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8941 break;
8942 }
8943
8944 /** @opdone */
8945 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8946 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8947 }
8948#undef GRP2_BODY_Eb_Ib
8949}
8950
8951
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   GRP2_BODY_Ev_Ib expands to the full register/memory Ev,Ib implementation
   for one group-2 instruction; a_pImplExpr selects the PCIEMOPSHIFTSIZES
   worker table (16/32/64-bit AIMPL workers).  Expects 'bRm' in scope. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9079
9080/**
9081 * @opmaps grp2_c1
9082 * @opcode /0
9083 * @opflclass rotate_count
9084 */
9085FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
9086{
9087 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9088 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9089}
9090
9091
9092/**
9093 * @opmaps grp2_c1
9094 * @opcode /1
9095 * @opflclass rotate_count
9096 */
9097FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
9098{
9099 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9100 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9101}
9102
9103
9104/**
9105 * @opmaps grp2_c1
9106 * @opcode /2
9107 * @opflclass rotate_carry_count
9108 */
9109FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
9110{
9111 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9112 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9113}
9114
9115
9116/**
9117 * @opmaps grp2_c1
9118 * @opcode /3
9119 * @opflclass rotate_carry_count
9120 */
9121FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
9122{
9123 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9124 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9125}
9126
9127
9128/**
9129 * @opmaps grp2_c1
9130 * @opcode /4
9131 * @opflclass shift_count
9132 */
9133FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
9134{
9135 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9136 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9137}
9138
9139
9140/**
9141 * @opmaps grp2_c1
9142 * @opcode /5
9143 * @opflclass shift_count
9144 */
9145FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
9146{
9147 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9148 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9149}
9150
9151
9152/**
9153 * @opmaps grp2_c1
9154 * @opcode /7
9155 * @opflclass shift_count
9156 */
9157FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
9158{
9159 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
9160 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9161}
9162
9163#undef GRP2_BODY_Ev_Ib
9164
9165/**
9166 * @opcode 0xc1
9167 */
9168FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
9169{
9170 IEMOP_HLP_MIN_186();
9171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9172
9173 switch (IEM_GET_MODRM_REG_8(bRm))
9174 {
9175 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
9176 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
9177 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
9178 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
9179 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
9180 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
9181 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
9182 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9183 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9184 }
9185}
9186
9187
9188/**
9189 * @opcode 0xc2
9190 */
9191FNIEMOP_DEF(iemOp_retn_Iw)
9192{
9193 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
9194 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9195 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9196 IEM_MC_BEGIN(0, 0);
9197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9198 IEM_MC_RETN_AND_FINISH(u16Imm);
9199 IEM_MC_END();
9200}
9201
9202
9203/**
9204 * @opcode 0xc3
9205 */
9206FNIEMOP_DEF(iemOp_retn)
9207{
9208 IEMOP_MNEMONIC(retn, "retn");
9209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9210 IEM_MC_BEGIN(0, 0);
9211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9212 IEM_MC_RETN_AND_FINISH(0);
9213 IEM_MC_END();
9214}
9215
9216
9217/**
9218 * @opcode 0xc4
9219 */
9220FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
9221{
9222 /* The LDS instruction is invalid 64-bit mode. In legacy and
9223 compatability mode it is invalid with MOD=3.
9224 The use as a VEX prefix is made possible by assigning the inverted
9225 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
9226 outside of 64-bit mode. VEX is not available in real or v86 mode. */
9227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9228 if ( IEM_IS_64BIT_CODE(pVCpu)
9229 || IEM_IS_MODRM_REG_MODE(bRm) )
9230 {
9231 IEMOP_MNEMONIC(vex3_prefix, "vex3");
9232 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9233 {
9234 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9235 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9236 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
9237 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9238 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9239#if 1
9240 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
9241 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
9242#else
9243 if (bVex2 & 0x80 /* VEX.W */)
9244 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
9245#endif
9246 if (IEM_IS_64BIT_CODE(pVCpu))
9247 {
9248#if 1
9249 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
9250 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
9251#else
9252 if (~bRm & 0x20 /* VEX.~B */)
9253 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
9254 if (~bRm & 0x40 /* VEX.~X */)
9255 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
9256 if (~bRm & 0x80 /* VEX.~R */)
9257 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
9258#endif
9259 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9260 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
9261 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
9262 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
9263 }
9264 else
9265 {
9266 pVCpu->iem.s.uRexReg = 0;
9267 pVCpu->iem.s.uRexIndex = 0;
9268 pVCpu->iem.s.uRexB = 0;
9269 /** @todo testcase: Will attemps to access registers 8 thru 15 from 16&32 bit
9270 * code raise \#UD or just be ignored? We're ignoring for now... */
9271 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
9272 }
9273 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
9274 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
9275
9276 switch (bRm & 0x1f)
9277 {
9278 case 1: /* 0x0f lead opcode byte. */
9279#ifdef IEM_WITH_VEX
9280 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9281#else
9282 IEMOP_BITCH_ABOUT_STUB();
9283 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9284#endif
9285
9286 case 2: /* 0x0f 0x38 lead opcode bytes. */
9287#ifdef IEM_WITH_VEX
9288 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9289#else
9290 IEMOP_BITCH_ABOUT_STUB();
9291 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9292#endif
9293
9294 case 3: /* 0x0f 0x3a lead opcode bytes. */
9295#ifdef IEM_WITH_VEX
9296 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9297#else
9298 IEMOP_BITCH_ABOUT_STUB();
9299 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9300#endif
9301
9302 default:
9303 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
9304 IEMOP_RAISE_INVALID_OPCODE_RET();
9305 }
9306 }
9307 Log(("VEX3: VEX support disabled!\n"));
9308 IEMOP_RAISE_INVALID_OPCODE_RET();
9309 }
9310
9311 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
9312 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
9313}
9314
9315
9316/**
9317 * @opcode 0xc5
9318 */
9319FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9320{
9321 /* The LES instruction is invalid 64-bit mode. In legacy and
9322 compatability mode it is invalid with MOD=3.
9323 The use as a VEX prefix is made possible by assigning the inverted
9324 REX.R to the top MOD bit, and the top bit in the inverted register
9325 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9326 to accessing registers 0..7 in this VEX form. */
9327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9328 if ( IEM_IS_64BIT_CODE(pVCpu)
9329 || IEM_IS_MODRM_REG_MODE(bRm))
9330 {
9331 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9332 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9333 {
9334 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9335 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9336 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9337 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9338 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9339 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9340 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9341 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9342 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9343 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9344
9345#ifdef IEM_WITH_VEX
9346 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9347#else
9348 IEMOP_BITCH_ABOUT_STUB();
9349 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9350#endif
9351 }
9352
9353 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
9354 Log(("VEX2: VEX support disabled!\n"));
9355 IEMOP_RAISE_INVALID_OPCODE_RET();
9356 }
9357
9358 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9359 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9360}
9361
9362
9363/**
9364 * @opcode 0xc6
9365 */
9366FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9367{
9368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9369 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9370 IEMOP_RAISE_INVALID_OPCODE_RET();
9371 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9372
9373 if (IEM_IS_MODRM_REG_MODE(bRm))
9374 {
9375 /* register access */
9376 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9377 IEM_MC_BEGIN(0, 0);
9378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9379 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9380 IEM_MC_ADVANCE_RIP_AND_FINISH();
9381 IEM_MC_END();
9382 }
9383 else
9384 {
9385 /* memory access. */
9386 IEM_MC_BEGIN(0, 0);
9387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9389 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9392 IEM_MC_ADVANCE_RIP_AND_FINISH();
9393 IEM_MC_END();
9394 }
9395}
9396
9397
9398/**
9399 * @opcode 0xc7
9400 */
9401FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9402{
9403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9404 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
9405 IEMOP_RAISE_INVALID_OPCODE_RET();
9406 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9407
9408 if (IEM_IS_MODRM_REG_MODE(bRm))
9409 {
9410 /* register access */
9411 switch (pVCpu->iem.s.enmEffOpSize)
9412 {
9413 case IEMMODE_16BIT:
9414 IEM_MC_BEGIN(0, 0);
9415 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9417 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9418 IEM_MC_ADVANCE_RIP_AND_FINISH();
9419 IEM_MC_END();
9420 break;
9421
9422 case IEMMODE_32BIT:
9423 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9424 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9426 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9427 IEM_MC_ADVANCE_RIP_AND_FINISH();
9428 IEM_MC_END();
9429 break;
9430
9431 case IEMMODE_64BIT:
9432 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9433 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9435 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9436 IEM_MC_ADVANCE_RIP_AND_FINISH();
9437 IEM_MC_END();
9438 break;
9439
9440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9441 }
9442 }
9443 else
9444 {
9445 /* memory access. */
9446 switch (pVCpu->iem.s.enmEffOpSize)
9447 {
9448 case IEMMODE_16BIT:
9449 IEM_MC_BEGIN(0, 0);
9450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9452 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9454 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9455 IEM_MC_ADVANCE_RIP_AND_FINISH();
9456 IEM_MC_END();
9457 break;
9458
9459 case IEMMODE_32BIT:
9460 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9463 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9465 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9466 IEM_MC_ADVANCE_RIP_AND_FINISH();
9467 IEM_MC_END();
9468 break;
9469
9470 case IEMMODE_64BIT:
9471 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9474 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9476 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9477 IEM_MC_ADVANCE_RIP_AND_FINISH();
9478 IEM_MC_END();
9479 break;
9480
9481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9482 }
9483 }
9484}
9485
9486
9487
9488
9489/**
9490 * @opcode 0xc8
9491 */
9492FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9493{
9494 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9495 IEMOP_HLP_MIN_186();
9496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9497 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9498 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9500 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9501 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9503 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9504}
9505
9506
9507/**
9508 * @opcode 0xc9
9509 */
9510FNIEMOP_DEF(iemOp_leave)
9511{
9512 IEMOP_MNEMONIC(leave, "leave");
9513 IEMOP_HLP_MIN_186();
9514 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9516 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9517 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9518 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9519 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9520}
9521
9522
9523/**
9524 * @opcode 0xca
9525 */
9526FNIEMOP_DEF(iemOp_retf_Iw)
9527{
9528 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9529 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9531 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9532 | IEM_CIMPL_F_MODE,
9533 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9534 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9535 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9536 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9537 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9538 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9539 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9540 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9541 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9542 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9543 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9544 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9545 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9546 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9547 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9548 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9549 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9550 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9551}
9552
9553
9554/**
9555 * @opcode 0xcb
9556 */
9557FNIEMOP_DEF(iemOp_retf)
9558{
9559 IEMOP_MNEMONIC(retf, "retf");
9560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9561 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9562 | IEM_CIMPL_F_MODE,
9563 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9564 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9565 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9566 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9567 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9568 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9569 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9570 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9571 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9572 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9573 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9574 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9575 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9576 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9577 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9578 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9579 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9580 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9581}
9582
9583
9584/**
9585 * @opcode 0xcc
9586 */
9587FNIEMOP_DEF(iemOp_int3)
9588{
9589 IEMOP_MNEMONIC(int3, "int3");
9590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9591 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9592 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9593 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9594}
9595
9596
9597/**
9598 * @opcode 0xcd
9599 */
9600FNIEMOP_DEF(iemOp_int_Ib)
9601{
9602 IEMOP_MNEMONIC(int_Ib, "int Ib");
9603 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9605 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9606 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9607 iemCImpl_int, u8Int, IEMINT_INTN);
9608 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9609}
9610
9611
9612/**
9613 * @opcode 0xce
9614 */
9615FNIEMOP_DEF(iemOp_into)
9616{
9617 IEMOP_MNEMONIC(into, "into");
9618 IEMOP_HLP_NO_64BIT();
9619 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9620 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9621 UINT64_MAX,
9622 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9623 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9624}
9625
9626
9627/**
9628 * @opcode 0xcf
9629 */
9630FNIEMOP_DEF(iemOp_iret)
9631{
9632 IEMOP_MNEMONIC(iret, "iret");
9633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9634 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9635 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9636 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9637 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9638 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9639 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9640 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9641 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9642 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9643 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9644 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9645 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9646 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9647 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9648 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9649 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9650 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9651 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9652 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9653 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9654 /* Segment registers are sanitized when returning to an outer ring, or fully
9655 reloaded when returning to v86 mode. Thus the large flush list above. */
9656}
9657
9658
9659/**
9660 * @opcode 0xd0
9661 */
9662FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9663{
9664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9665
9666 /* Need to use a body macro here since the EFLAGS behaviour differs between
9667 the shifts, rotates and rotate w/ carry. Sigh. */
9668#define GRP2_BODY_Eb_1(a_pImplExpr) \
9669 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9670 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9671 { \
9672 /* register */ \
9673 IEM_MC_BEGIN(0, 0); \
9674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9675 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
9676 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9677 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9678 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 2); \
9679 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
9680 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9681 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9682 IEM_MC_END(); \
9683 } \
9684 else \
9685 { \
9686 /* memory */ \
9687 IEM_MC_BEGIN(0, 0); \
9688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9691 \
9692 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9693 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
9694 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9695 \
9696 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9697 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 2); \
9698 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
9699 \
9700 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9701 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9702 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9703 IEM_MC_END(); \
9704 } (void)0
9705
9706 switch (IEM_GET_MODRM_REG_8(bRm))
9707 {
9708 /**
9709 * @opdone
9710 * @opmaps grp2_d0
9711 * @opcode /0
9712 * @opflclass rotate_1
9713 */
9714 case 0:
9715 {
9716 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9717 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9718 break;
9719 }
9720 /**
9721 * @opdone
9722 * @opmaps grp2_d0
9723 * @opcode /1
9724 * @opflclass rotate_1
9725 */
9726 case 1:
9727 {
9728 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9729 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9730 break;
9731 }
9732 /**
9733 * @opdone
9734 * @opmaps grp2_d0
9735 * @opcode /2
9736 * @opflclass rotate_carry_1
9737 */
9738 case 2:
9739 {
9740 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9741 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9742 break;
9743 }
9744 /**
9745 * @opdone
9746 * @opmaps grp2_d0
9747 * @opcode /3
9748 * @opflclass rotate_carry_1
9749 */
9750 case 3:
9751 {
9752 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9753 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9754 break;
9755 }
9756 /**
9757 * @opdone
9758 * @opmaps grp2_d0
9759 * @opcode /4
9760 * @opflclass shift_1
9761 */
9762 case 4:
9763 {
9764 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9765 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9766 break;
9767 }
9768 /**
9769 * @opdone
9770 * @opmaps grp2_d0
9771 * @opcode /5
9772 * @opflclass shift_1
9773 */
9774 case 5:
9775 {
9776 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9777 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9778 break;
9779 }
9780 /**
9781 * @opdone
9782 * @opmaps grp2_d0
9783 * @opcode /7
9784 * @opflclass shift_1
9785 */
9786 case 7:
9787 {
9788 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9789 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9790 break;
9791 }
9792 /** @opdone */
9793 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9794 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9795 }
9796#undef GRP2_BODY_Eb_1
9797}
9798
9799
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry.  Sigh.
   Shared by the iemOp_grp2_*_Ev_1 workers (opcode 0xd1) below; expects 'bRm'
   to be in scope and expands to the reg/mem decode + execute sequences for
   all three effective operand sizes with a constant shift count of 1. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,    1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,    1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,    1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_ARG(uint16_t *,  pu16Dst,    1); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,    1); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,    1); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_EFLAGS(      fEFlagsIn,  0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ 1, 2); \
                IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9920
9921/**
9922 * @opmaps grp2_d1
9923 * @opcode /0
9924 * @opflclass rotate_1
9925 */
9926FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9927{
9928 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9929 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9930}
9931
9932
9933/**
9934 * @opmaps grp2_d1
9935 * @opcode /1
9936 * @opflclass rotate_1
9937 */
9938FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9939{
9940 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9941 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9942}
9943
9944
9945/**
9946 * @opmaps grp2_d1
9947 * @opcode /2
9948 * @opflclass rotate_carry_1
9949 */
9950FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9951{
9952 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9953 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9954}
9955
9956
9957/**
9958 * @opmaps grp2_d1
9959 * @opcode /3
9960 * @opflclass rotate_carry_1
9961 */
9962FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9963{
9964 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9965 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9966}
9967
9968
9969/**
9970 * @opmaps grp2_d1
9971 * @opcode /4
9972 * @opflclass shift_1
9973 */
9974FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9975{
9976 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9977 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9978}
9979
9980
9981/**
9982 * @opmaps grp2_d1
9983 * @opcode /5
9984 * @opflclass shift_1
9985 */
9986FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9987{
9988 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9989 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9990}
9991
9992
9993/**
9994 * @opmaps grp2_d1
9995 * @opcode /7
9996 * @opflclass shift_1
9997 */
9998FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9999{
10000 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
10001 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10002}
10003
10004#undef GRP2_BODY_Ev_1
10005
10006/**
10007 * @opcode 0xd1
10008 */
10009FNIEMOP_DEF(iemOp_Grp2_Ev_1)
10010{
10011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10012 switch (IEM_GET_MODRM_REG_8(bRm))
10013 {
10014 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
10015 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
10016 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
10017 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
10018 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
10019 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
10020 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
10021 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10022 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10023 }
10024}
10025
10026
10027/**
10028 * @opcode 0xd2
10029 */
10030FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
10031{
10032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10033
10034 /* Need to use a body macro here since the EFLAGS behaviour differs between
10035 the shifts, rotates and rotate w/ carry. Sigh. */
10036#define GRP2_BODY_Eb_CL(a_pImplExpr) \
10037 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10038 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10039 { \
10040 /* register */ \
10041 IEM_MC_BEGIN(0, 0); \
10042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10043 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10044 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10045 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
10046 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10047 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10048 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
10049 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10050 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10051 IEM_MC_END(); \
10052 } \
10053 else \
10054 { \
10055 /* memory */ \
10056 IEM_MC_BEGIN(0, 0); \
10057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10060 \
10061 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10062 IEM_MC_ARG(uint8_t *, pu8Dst, 1); \
10063 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10064 \
10065 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10066 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10067 \
10068 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10069 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU8, fEFlagsIn, pu8Dst, cShiftArg); \
10070 \
10071 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10072 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10073 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10074 IEM_MC_END(); \
10075 } (void)0
10076
10077 switch (IEM_GET_MODRM_REG_8(bRm))
10078 {
10079 /**
10080 * @opdone
10081 * @opmaps grp2_d0
10082 * @opcode /0
10083 * @opflclass rotate_count
10084 */
10085 case 0:
10086 {
10087 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10088 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
10089 break;
10090 }
10091 /**
10092 * @opdone
10093 * @opmaps grp2_d0
10094 * @opcode /1
10095 * @opflclass rotate_count
10096 */
10097 case 1:
10098 {
10099 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10100 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
10101 break;
10102 }
10103 /**
10104 * @opdone
10105 * @opmaps grp2_d0
10106 * @opcode /2
10107 * @opflclass rotate_carry_count
10108 */
10109 case 2:
10110 {
10111 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10112 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
10113 break;
10114 }
10115 /**
10116 * @opdone
10117 * @opmaps grp2_d0
10118 * @opcode /3
10119 * @opflclass rotate_carry_count
10120 */
10121 case 3:
10122 {
10123 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10124 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
10125 break;
10126 }
10127 /**
10128 * @opdone
10129 * @opmaps grp2_d0
10130 * @opcode /4
10131 * @opflclass shift_count
10132 */
10133 case 4:
10134 {
10135 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10136 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
10137 break;
10138 }
10139 /**
10140 * @opdone
10141 * @opmaps grp2_d0
10142 * @opcode /5
10143 * @opflclass shift_count
10144 */
10145 case 5:
10146 {
10147 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10148 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10149 break;
10150 }
10151 /**
10152 * @opdone
10153 * @opmaps grp2_d0
10154 * @opcode /7
10155 * @opflclass shift_count
10156 */
10157 case 7:
10158 {
10159 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10160 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10161 break;
10162 }
10163 /** @opdone */
10164 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10165 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10166 }
10167#undef GRP2_BODY_Eb_CL
10168}
10169
10170
10171/* Need to use a body macro here since the EFLAGS behaviour differs between
10172 the shifts, rotates and rotate w/ carry. Sigh. */
10173#define GRP2_BODY_Ev_CL(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
10174 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10175 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10176 { \
10177 /* register */ \
10178 switch (pVCpu->iem.s.enmEffOpSize) \
10179 { \
10180 case IEMMODE_16BIT: \
10181 IEM_MC_BEGIN(0, 0); \
10182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10183 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10184 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10185 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10186 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10187 IEM_MC_LOCAL(uint16_t, u16Dst); \
10188 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10189 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10190 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u16Dst, cShiftArg, fEFlags, 16); \
10191 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
10192 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10193 } IEM_MC_NATIVE_ELSE() { \
10194 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10195 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10196 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10197 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10198 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
10199 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10200 } IEM_MC_NATIVE_ENDIF(); \
10201 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10202 IEM_MC_END(); \
10203 break; \
10204 \
10205 case IEMMODE_32BIT: \
10206 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10208 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10209 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10210 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10211 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10212 IEM_MC_LOCAL(uint32_t, u32Dst); \
10213 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10214 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10215 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u32Dst, cShiftArg, fEFlags, 32); \
10216 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
10217 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10218 } IEM_MC_NATIVE_ELSE() { \
10219 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10220 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10221 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10222 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10223 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
10224 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10225 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10226 } IEM_MC_NATIVE_ENDIF(); \
10227 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10228 IEM_MC_END(); \
10229 break; \
10230 \
10231 case IEMMODE_64BIT: \
10232 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10234 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10235 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
10236 IEM_MC_NATIVE_SET_AMD64_HOST_REG_FOR_LOCAL(cShiftArg, X86_GREG_xCX); \
10237 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* we modify this on arm64 */ \
10238 IEM_MC_LOCAL(uint64_t, u64Dst); \
10239 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10240 IEM_MC_LOCAL_EFLAGS(fEFlags); \
10241 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_Ins,_r_CL_efl), u64Dst, cShiftArg, fEFlags, 64); \
10242 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
10243 IEM_MC_COMMIT_EFLAGS(fEFlags); /** @todo IEM_MC_COMMIT_EFLAGS_OPT */ \
10244 } IEM_MC_NATIVE_ELSE() { \
10245 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10246 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10247 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10248 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10249 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
10250 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10251 } IEM_MC_NATIVE_ENDIF(); \
10252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10253 IEM_MC_END(); \
10254 break; \
10255 \
10256 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10257 } \
10258 } \
10259 else \
10260 { \
10261 /* memory */ \
10262 switch (pVCpu->iem.s.enmEffOpSize) \
10263 { \
10264 case IEMMODE_16BIT: \
10265 IEM_MC_BEGIN(0, 0); \
10266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10269 \
10270 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10271 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10272 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10273 \
10274 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10275 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10276 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10277 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, cShiftArg); \
10278 \
10279 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10280 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10281 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10282 IEM_MC_END(); \
10283 break; \
10284 \
10285 case IEMMODE_32BIT: \
10286 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10290 \
10291 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10292 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10293 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10294 \
10295 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10296 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10297 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10298 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, cShiftArg); \
10299 \
10300 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10301 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10303 IEM_MC_END(); \
10304 break; \
10305 \
10306 case IEMMODE_64BIT: \
10307 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10311 \
10312 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10313 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10314 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10315 \
10316 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
10317 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10318 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10319 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, cShiftArg); \
10320 \
10321 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10322 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10323 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10324 IEM_MC_END(); \
10325 break; \
10326 \
10327 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10328 } \
10329 } (void)0
10330
10331
10332/**
10333 * @opmaps grp2_d0
10334 * @opcode /0
10335 * @opflclass rotate_count
10336 */
10337FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
10338{
10339 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10340 GRP2_BODY_Ev_CL(rol, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags), 0, 0);
10341}
10342
10343
10344/**
10345 * @opmaps grp2_d0
10346 * @opcode /1
10347 * @opflclass rotate_count
10348 */
10349FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
10350{
10351 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10352 GRP2_BODY_Ev_CL(ror, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags), 0, 0);
10353}
10354
10355
10356/**
10357 * @opmaps grp2_d0
10358 * @opcode /2
10359 * @opflclass rotate_carry_count
10360 */
10361FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
10362{
10363 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10364 GRP2_BODY_Ev_CL(rcl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags), 0, 0);
10365}
10366
10367
10368/**
10369 * @opmaps grp2_d0
10370 * @opcode /3
10371 * @opflclass rotate_carry_count
10372 */
10373FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
10374{
10375 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10376 GRP2_BODY_Ev_CL(rcr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags), 0, 0);
10377}
10378
10379
10380/**
10381 * @opmaps grp2_d0
10382 * @opcode /4
10383 * @opflclass shift_count
10384 */
10385FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
10386{
10387 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10388 GRP2_BODY_Ev_CL(shl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags), RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
10389}
10390
10391
10392/**
10393 * @opmaps grp2_d0
10394 * @opcode /5
10395 * @opflclass shift_count
10396 */
10397FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
10398{
10399 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10400 GRP2_BODY_Ev_CL(shr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags), 0, 0);
10401}
10402
10403
10404/**
10405 * @opmaps grp2_d0
10406 * @opcode /7
10407 * @opflclass shift_count
10408 */
10409FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
10410{
10411 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10412 GRP2_BODY_Ev_CL(sar, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags), 0, 0);
10413}
10414
10415#undef GRP2_BODY_Ev_CL
10416
10417/**
10418 * @opcode 0xd3
10419 */
10420FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10421{
10422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10423 switch (IEM_GET_MODRM_REG_8(bRm))
10424 {
10425 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10426 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10427 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10428 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10429 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10430 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10431 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10432 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10433 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10434 }
10435}
10436
10437
10438/**
10439 * @opcode 0xd4
10440 * @opflmodify cf,pf,af,zf,sf,of
10441 * @opflundef cf,af,of
10442 */
10443FNIEMOP_DEF(iemOp_aam_Ib)
10444{
10445/** @todo testcase: aam */
10446 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10447 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10449 IEMOP_HLP_NO_64BIT();
10450 if (!bImm)
10451 IEMOP_RAISE_DIVIDE_ERROR_RET();
10452 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10453}
10454
10455
10456/**
10457 * @opcode 0xd5
10458 * @opflmodify cf,pf,af,zf,sf,of
10459 * @opflundef cf,af,of
10460 */
10461FNIEMOP_DEF(iemOp_aad_Ib)
10462{
10463/** @todo testcase: aad? */
10464 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10465 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10467 IEMOP_HLP_NO_64BIT();
10468 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10469}
10470
10471
10472/**
10473 * @opcode 0xd6
10474 */
10475FNIEMOP_DEF(iemOp_salc)
10476{
10477 IEMOP_MNEMONIC(salc, "salc");
10478 IEMOP_HLP_NO_64BIT();
10479
10480 IEM_MC_BEGIN(0, 0);
10481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10483 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10484 } IEM_MC_ELSE() {
10485 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10486 } IEM_MC_ENDIF();
10487 IEM_MC_ADVANCE_RIP_AND_FINISH();
10488 IEM_MC_END();
10489}
10490
10491
10492/**
10493 * @opcode 0xd7
10494 */
10495FNIEMOP_DEF(iemOp_xlat)
10496{
10497 IEMOP_MNEMONIC(xlat, "xlat");
10498 switch (pVCpu->iem.s.enmEffAddrMode)
10499 {
10500 case IEMMODE_16BIT:
10501 IEM_MC_BEGIN(0, 0);
10502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10503 IEM_MC_LOCAL(uint8_t, u8Tmp);
10504 IEM_MC_LOCAL(uint16_t, u16Addr);
10505 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
10506 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
10507 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10508 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10509 IEM_MC_ADVANCE_RIP_AND_FINISH();
10510 IEM_MC_END();
10511 break;
10512
10513 case IEMMODE_32BIT:
10514 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10516 IEM_MC_LOCAL(uint8_t, u8Tmp);
10517 IEM_MC_LOCAL(uint32_t, u32Addr);
10518 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
10519 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
10520 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10521 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10522 IEM_MC_ADVANCE_RIP_AND_FINISH();
10523 IEM_MC_END();
10524 break;
10525
10526 case IEMMODE_64BIT:
10527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10529 IEM_MC_LOCAL(uint8_t, u8Tmp);
10530 IEM_MC_LOCAL(uint64_t, u64Addr);
10531 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10532 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10533 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10534 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10535 IEM_MC_ADVANCE_RIP_AND_FINISH();
10536 IEM_MC_END();
10537 break;
10538
10539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10540 }
10541}
10542
10543
10544/**
10545 * Common worker for FPU instructions working on ST0 and STn, and storing the
10546 * result in ST0.
10547 *
10548 * @param bRm Mod R/M byte.
10549 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10550 */
10551FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10552{
10553 IEM_MC_BEGIN(0, 0);
10554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10555 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10556 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10557 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10558 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10559
10560 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10561 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10562 IEM_MC_PREPARE_FPU_USAGE();
10563 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10564 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10565 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10566 } IEM_MC_ELSE() {
10567 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10568 } IEM_MC_ENDIF();
10569 IEM_MC_ADVANCE_RIP_AND_FINISH();
10570
10571 IEM_MC_END();
10572}
10573
10574
10575/**
10576 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10577 * flags.
10578 *
10579 * @param bRm Mod R/M byte.
10580 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10581 */
10582FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10583{
10584 IEM_MC_BEGIN(0, 0);
10585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10586 IEM_MC_LOCAL(uint16_t, u16Fsw);
10587 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10588 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10589 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10590
10591 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10592 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10593 IEM_MC_PREPARE_FPU_USAGE();
10594 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10595 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10596 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10597 } IEM_MC_ELSE() {
10598 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10599 } IEM_MC_ENDIF();
10600 IEM_MC_ADVANCE_RIP_AND_FINISH();
10601
10602 IEM_MC_END();
10603}
10604
10605
10606/**
10607 * Common worker for FPU instructions working on ST0 and STn, only affecting
10608 * flags, and popping when done.
10609 *
10610 * @param bRm Mod R/M byte.
10611 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10612 */
10613FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10614{
10615 IEM_MC_BEGIN(0, 0);
10616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10617 IEM_MC_LOCAL(uint16_t, u16Fsw);
10618 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10619 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10620 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10621
10622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10624 IEM_MC_PREPARE_FPU_USAGE();
10625 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10626 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10627 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10628 } IEM_MC_ELSE() {
10629 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10630 } IEM_MC_ENDIF();
10631 IEM_MC_ADVANCE_RIP_AND_FINISH();
10632
10633 IEM_MC_END();
10634}
10635
10636
/** Opcode 0xd8 11/0.  fadd st0,stN: st0 = st0 + stN. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10643
10644
/** Opcode 0xd8 11/1.  fmul st0,stN: st0 = st0 * stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10651
10652
/** Opcode 0xd8 11/2.  fcom st0,stN: compare, only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10659
10660
/** Opcode 0xd8 11/3.  fcomp st0,stN: same compare helper as fcom, but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10667
10668
/** Opcode 0xd8 11/4.  fsub st0,stN: st0 = st0 - stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10675
10676
/** Opcode 0xd8 11/5.  fsubr st0,stN: reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10683
10684
/** Opcode 0xd8 11/6.  fdiv st0,stN: st0 = st0 / stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10691
10692
/** Opcode 0xd8 11/7.  fdivr st0,stN: reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10699
10700
10701/**
10702 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10703 * the result in ST0.
10704 *
10705 * @param bRm Mod R/M byte.
10706 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10707 */
10708FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10709{
10710 IEM_MC_BEGIN(0, 0);
10711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10712 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10713 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10714 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10715 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10716 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10717
10718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10720
10721 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10722 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10723 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10724
10725 IEM_MC_PREPARE_FPU_USAGE();
10726 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10727 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10728 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10729 } IEM_MC_ELSE() {
10730 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10731 } IEM_MC_ENDIF();
10732 IEM_MC_ADVANCE_RIP_AND_FINISH();
10733
10734 IEM_MC_END();
10735}
10736
10737
/** Opcode 0xd8 !11/0.  fadd st0,m32r: st0 = st0 + [mem32real]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10744
10745
/** Opcode 0xd8 !11/1.  fmul st0,m32r: st0 = st0 * [mem32real]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10752
10753
/** Opcode 0xd8 !11/2.  fcom st0,m32r: compare st0 with a 32-bit real from
 *  memory; only FSW is updated (open-coded, no store worker applies). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10785
10786
/** Opcode 0xd8 !11/3.  fcomp st0,m32r: like fcom m32r but pops the stack
 *  after updating FSW (both on the normal and the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10818
10819
/** Opcode 0xd8 !11/4.  fsub st0,m32r: st0 = st0 - [mem32real]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10826
10827
/** Opcode 0xd8 !11/5.  fsubr st0,m32r: reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10834
10835
/** Opcode 0xd8 !11/6.  fdiv st0,m32r: st0 = st0 / [mem32real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10842
10843
/** Opcode 0xd8 !11/7.  fdivr st0,m32r: reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10850
10851
10852/**
10853 * @opcode 0xd8
10854 */
10855FNIEMOP_DEF(iemOp_EscF0)
10856{
10857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10858 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10859
10860 if (IEM_IS_MODRM_REG_MODE(bRm))
10861 {
10862 switch (IEM_GET_MODRM_REG_8(bRm))
10863 {
10864 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10865 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10866 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10867 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10868 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10869 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10870 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10871 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10873 }
10874 }
10875 else
10876 {
10877 switch (IEM_GET_MODRM_REG_8(bRm))
10878 {
10879 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10880 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10881 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10882 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10883 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10884 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10885 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10886 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10888 }
10889 }
10890}
10891
10892
/** Opcode 0xd9 /0 mem32real
 * Pushes a 32-bit real from memory onto the FPU stack (converted to r80).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the new bottom register (ST7 relative to the current top)
       to be free, otherwise it's a stack push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10923
10924
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real.  If ST0 is empty and the invalid
 * operation exception is masked (FCW.IM), a negative QNaN is stored instead;
 * if unmasked, the store is rolled back and only the underflow is raised. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper returned. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10962
10963
/** Opcode 0xd9 !11/3
 * Like fst m32r, but pops the register stack after the store / underflow
 * handling (the _THEN_POP FSW update variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: store default QNaN if \#IA is masked, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11001
11002
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size) from
 * memory; the heavy lifting is deferred to iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11021
11022
11023/** Opcode 0xd9 !11/5 */
11024FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11025{
11026 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
11027 IEM_MC_BEGIN(0, 0);
11028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11030
11031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11032 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11034
11035 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11036 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11037
11038 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
11039 iemCImpl_fldcw, u16Fsw);
11040 IEM_MC_END();
11041}
11042
11043
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes depending on operand size) to
 * memory via iemCImpl_fnstenv.
 * NOTE(review): the stats mnemonic says "fstenv" while the function is the
 * no-wait fnstenv form -- presumably intentional (FSTENV is FWAIT+FNSTENV);
 * confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11062
11063
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 16-bit memory operand (no-wait form;
 * simple enough to be done inline without a CIMPL). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
11080
11081
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no arithmetic effect, but still subject to \#NM/\#MF checks and
 * updates the FPU opcode/instruction pointer state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
11097
11098
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of STn onto the FPU stack; empty source pushes the
 * stack-underflow response instead. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11123
11124
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i).
 *  If either register is empty the C-impl underflow worker is invoked. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap by writing ST(0)'s old value into ST(i) and storing ST(i)'s
           old value (FpuRes) into ST(0); C1 is cleared via X86_FSW_C1 mask. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11153
11154
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *  FSTP ST(i): copy ST(0) to ST(i) and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop (FSW update only). */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11201
11202
11203/**
11204 * Common worker for FPU instructions working on ST0 and replaces it with the
11205 * result, i.e. unary operators.
11206 *
11207 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11208 */
11209FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
11210{
11211 IEM_MC_BEGIN(0, 0);
11212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11213 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11214 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11215 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11216
11217 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11218 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11219 IEM_MC_PREPARE_FPU_USAGE();
11220 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11221 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
11222 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11223 } IEM_MC_ELSE() {
11224 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11225 } IEM_MC_ENDIF();
11226 IEM_MC_ADVANCE_RIP_AND_FINISH();
11227
11228 IEM_MC_END();
11229}
11230
11231
/** Opcode 0xd9 0xe0 - FCHS: complement the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11238
11239
/** Opcode 0xd9 0xe1 - FABS: clear the sign of ST(0) (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11246
11247
/** Opcode 0xd9 0xe4 - FTST: compare ST(0) with +0.0, setting C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark empty/indefinite. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11271
11272
/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST(0) into C0/C2/C3, C1=sign.
 *  Note: no empty-register check here - FXAM must classify empty registers
 *  too, so the assembly worker is handed the register reference directly. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11293
11294
11295/**
11296 * Common worker for FPU instructions pushing a constant onto the FPU stack.
11297 *
11298 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11299 */
11300FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
11301{
11302 IEM_MC_BEGIN(0, 0);
11303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11304 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11305 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11306
11307 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11308 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11309 IEM_MC_PREPARE_FPU_USAGE();
11310 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11311 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
11312 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
11313 } IEM_MC_ELSE() {
11314 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
11315 } IEM_MC_ENDIF();
11316 IEM_MC_ADVANCE_RIP_AND_FINISH();
11317
11318 IEM_MC_END();
11319}
11320
11321
/** Opcode 0xd9 0xe8 - FLD1: push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11328
11329
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11336
11337
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11344
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11351
11352
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11359
/** Opcode 0xd9 0xed - FLDLN2: push loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11366
11367
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11374
11375
/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11389
11390
11391/**
11392 * Common worker for FPU instructions working on STn and ST0, storing the result
11393 * in STn, and popping the stack unless IE, DE or ZE was raised.
11394 *
11395 * @param bRm Mod R/M byte.
11396 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11397 */
11398FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11399{
11400 IEM_MC_BEGIN(0, 0);
11401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11402 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11403 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11404 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11405 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11406
11407 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11408 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11409
11410 IEM_MC_PREPARE_FPU_USAGE();
11411 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11412 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11413 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11414 } IEM_MC_ELSE() {
11415 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11416 } IEM_MC_ENDIF();
11417 IEM_MC_ADVANCE_RIP_AND_FINISH();
11418
11419 IEM_MC_END();
11420}
11421
11422
/** Opcode 0xd9 0xf1 - FYL2X: ST(1) := ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
11429
11430
11431/**
11432 * Common worker for FPU instructions working on ST0 and having two outputs, one
11433 * replacing ST0 and one pushed onto the stack.
11434 *
11435 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11436 */
11437FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
11438{
11439 IEM_MC_BEGIN(0, 0);
11440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11441 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
11442 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
11443 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11444
11445 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11446 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11447 IEM_MC_PREPARE_FPU_USAGE();
11448 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11449 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
11450 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
11451 } IEM_MC_ELSE() {
11452 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
11453 } IEM_MC_ENDIF();
11454 IEM_MC_ADVANCE_RIP_AND_FINISH();
11455
11456 IEM_MC_END();
11457}
11458
11459
/** Opcode 0xd9 0xf2 - FPTAN: ST(0) := tan(ST(0)), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11466
11467
/** Opcode 0xd9 0xf3 - FPATAN: ST(1) := arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11474
11475
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (replaces ST(0))
 *  and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11482
11483
/** Opcode 0xd9 0xf5 - FPREM1: IEEE 754 partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11490
11491
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11512
11513
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11534
11535
/** Opcode 0xd9 0xf8 - FPREM: (truncating) partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11542
11543
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) := ST(1) * log2(ST(0) + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11550
11551
/** Opcode 0xd9 0xfa - FSQRT: ST(0) := sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11558
11559
/** Opcode 0xd9 0xfb - FSINCOS: ST(0) := sin(ST(0)), then push cos(old ST(0)). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11566
11567
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11574
11575
/** Opcode 0xd9 0xfd - FSCALE: ST(0) := ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11582
11583
/** Opcode 0xd9 0xfe - FSIN: ST(0) := sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11590
11591
/** Opcode 0xd9 0xff - FCOS: ST(0) := cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11598
11599
/** Used by iemOp_EscF1.
 *  Dispatch table for 0xd9 register-form opcodes 0xe0..0xff;
 *  index is the ModR/M byte minus 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11636
11637
11638/**
11639 * @opcode 0xd9
11640 */
11641FNIEMOP_DEF(iemOp_EscF1)
11642{
11643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11644 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11645
11646 if (IEM_IS_MODRM_REG_MODE(bRm))
11647 {
11648 switch (IEM_GET_MODRM_REG_8(bRm))
11649 {
11650 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11651 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11652 case 2:
11653 if (bRm == 0xd0)
11654 return FNIEMOP_CALL(iemOp_fnop);
11655 IEMOP_RAISE_INVALID_OPCODE_RET();
11656 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11657 case 4:
11658 case 5:
11659 case 6:
11660 case 7:
11661 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11662 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11664 }
11665 }
11666 else
11667 {
11668 switch (IEM_GET_MODRM_REG_8(bRm))
11669 {
11670 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11671 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11672 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11673 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11674 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11675 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11676 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11677 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11679 }
11680 }
11681}
11682
11683
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11708
11709
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11734
11735
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11760
11761
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11786
11787
11788/**
11789 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11790 * flags, and popping twice when done.
11791 *
11792 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11793 */
11794FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11795{
11796 IEM_MC_BEGIN(0, 0);
11797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11798 IEM_MC_LOCAL(uint16_t, u16Fsw);
11799 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11800 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11801 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11802
11803 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11804 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11805
11806 IEM_MC_PREPARE_FPU_USAGE();
11807 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11808 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11809 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11810 } IEM_MC_ELSE() {
11811 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11812 } IEM_MC_ENDIF();
11813 IEM_MC_ADVANCE_RIP_AND_FINISH();
11814
11815 IEM_MC_END();
11816}
11817
11818
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11825
11826
11827/**
11828 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11829 * the result in ST0.
11830 *
11831 * @param bRm Mod R/M byte.
11832 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11833 */
11834FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11835{
11836 IEM_MC_BEGIN(0, 0);
11837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11838 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11839 IEM_MC_LOCAL(int32_t, i32Val2);
11840 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11841 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11842 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11843
11844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11846
11847 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11848 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11849 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11850
11851 IEM_MC_PREPARE_FPU_USAGE();
11852 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11853 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11854 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11855 } IEM_MC_ELSE() {
11856 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11857 } IEM_MC_ENDIF();
11858 IEM_MC_ADVANCE_RIP_AND_FINISH();
11859
11860 IEM_MC_END();
11861}
11862
11863
/** Opcode 0xda !11/0 - FIADD m32int: ST(0) := ST(0) + m32int. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11870
11871
/** Opcode 0xda !11/1 - FIMUL m32int: ST(0) := ST(0) * m32int. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11878
11879
/** Opcode 0xda !11/2 - FICOM m32int: compare ST(0) with m32int (flags only). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11911
11912
/** Opcode 0xda !11/3 - FICOMP m32int: like FICOM m32int but pops afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11944
11945
/** Opcode 0xda !11/4 - FISUB m32int: ST(0) := ST(0) - m32int. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11952
11953
/** Opcode 0xda !11/5 - FISUBR m32int: ST(0) := m32int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11960
11961
/** Opcode 0xda !11/6 - FIDIV m32int: ST(0) := ST(0) / m32int. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11968
11969
/** Opcode 0xda !11/7 - FIDIVR m32int: ST(0) := m32int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11976
11977
11978/**
11979 * @opcode 0xda
11980 */
11981FNIEMOP_DEF(iemOp_EscF2)
11982{
11983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11984 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11985 if (IEM_IS_MODRM_REG_MODE(bRm))
11986 {
11987 switch (IEM_GET_MODRM_REG_8(bRm))
11988 {
11989 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11990 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11991 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11992 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11993 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11994 case 5:
11995 if (bRm == 0xe9)
11996 return FNIEMOP_CALL(iemOp_fucompp);
11997 IEMOP_RAISE_INVALID_OPCODE_RET();
11998 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11999 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12001 }
12002 }
12003 else
12004 {
12005 switch (IEM_GET_MODRM_REG_8(bRm))
12006 {
12007 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
12008 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
12009 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
12010 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
12011 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
12012 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
12013 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
12014 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
12015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12016 }
12017 }
12018}
12019
12020
/** Opcode 0xdb !11/0 - FILD m32int: convert m32int to real80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new top after the push; must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12051
12052
/** Opcode 0xdb !11/1 - FISTTP m32int (SSE3): store ST(0) truncated toward
 *  zero to m32int and pop.  On stack underflow with IM masked, the integer
 *  indefinite value (INT32_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* The destination is mapped up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12090
12091
/** Opcode 0xdb !11/2 - fist m32i.
 * Stores ST(0) to a 32-bit signed integer in memory, rounding per FCW.RC.
 * Does not pop the stack (see iemOp_fistp_m32i for the popping variant). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write integer indefinite if #IA is masked,
           otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12129
12130
/** Opcode 0xdb !11/3 - fistp m32i.
 * Like FIST m32i (rounds per FCW.RC) but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write integer indefinite if #IA is masked,
           otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12168
12169
/** Opcode 0xdb !11/5 - fld m80r.
 * Pushes an 80-bit extended-precision value from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand before touching the FPU stack, so memory faults
       leave the FPU state unmodified. */
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top (i.e. the new top). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12200
12201
/** Opcode 0xdb !11/7 - fstp m80r.
 * Stores ST(0) to an 80-bit extended-precision memory location and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12239
12240
/** Opcode 0xdb 11/0 - fcmovnb st0,stN.
 * Copies ST(i) into ST(0) when CF is clear (not below); both registers
 * must be non-empty or a stack underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: !CF. The move is conditional, the FOP/FIP update is not. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12265
12266
/** Opcode 0xdb 11/1 - fcmovne st0,stN.
 * Copies ST(i) into ST(0) when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: !ZF. The move is conditional, the FOP/FIP update is not. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12291
12292
/** Opcode 0xdb 11/2 - fcmovnbe st0,stN.
 * Copies ST(i) into ST(0) when both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: !CF && !ZF. The move is conditional, the FOP/FIP update is not. */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12317
12318
/** Opcode 0xdb 11/3 - fcmovnu st0,stN.
 * Copies ST(i) into ST(0) when PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: !PF. The move is conditional, the FOP/FIP update is not. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12343
12344
/** Opcode 0xdb 0xe0 - fneni.
 * 8087 interrupt-enable instruction; implemented as a no-op here (only the
 * CR0.EM/TS device-not-available check is performed), matching the "ign"
 * behaviour of later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12355
12356
/** Opcode 0xdb 0xe1 - fndisi.
 * 8087 interrupt-disable instruction; implemented as a no-op here (only the
 * device-not-available check is performed), matching the "ign" behaviour of
 * later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12367
12368
/** Opcode 0xdb 0xe2 - fnclex.
 * Clears the FPU exception flags in FSW without checking for pending
 * exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12381
12382
/** Opcode 0xdb 0xe3 - fninit.
 * Re-initializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false since this is the no-wait form. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
12391
12392
/** Opcode 0xdb 0xe4 - fnsetpm.
 * 80287 "set protected mode" instruction; a no-op on later FPUs, so only the
 * device-not-available check is performed here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12403
12404
/** Opcode 0xdb 0xe5 - frstpm.
 * 80287XL "reset protected mode" instruction; raises \#UD since that is the
 * behaviour of newer CPUs (the ignore-it variant is kept under \#if 0). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
12420
12421
/** Opcode 0xdb 11/5 - fucomi st0,stN.
 * Unordered compare of ST(0) with ST(i), setting ZF/PF/CF in EFLAGS.
 * The third CIMPL argument packs the fPop flag (clear here) together with
 * the current FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12430
12431
12432/** Opcode 0xdb 11/6. */
12433FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
12434{
12435 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
12436 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12437 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12438 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12439}
12440
12441
12442/**
12443 * @opcode 0xdb
12444 */
12445FNIEMOP_DEF(iemOp_EscF3)
12446{
12447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12448 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
12449 if (IEM_IS_MODRM_REG_MODE(bRm))
12450 {
12451 switch (IEM_GET_MODRM_REG_8(bRm))
12452 {
12453 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
12454 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
12455 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
12456 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
12457 case 4:
12458 switch (bRm)
12459 {
12460 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
12461 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
12462 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
12463 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
12464 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
12465 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
12466 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
12467 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
12468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12469 }
12470 break;
12471 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
12472 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
12473 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12475 }
12476 }
12477 else
12478 {
12479 switch (IEM_GET_MODRM_REG_8(bRm))
12480 {
12481 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
12482 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
12483 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
12484 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
12485 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
12486 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
12487 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12488 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
12489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12490 }
12491 }
12492}
12493
12494
12495/**
12496 * Common worker for FPU instructions working on STn and ST0, and storing the
12497 * result in STn unless IE, DE or ZE was raised.
12498 *
12499 * @param bRm Mod R/M byte.
12500 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12501 */
12502FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12503{
12504 IEM_MC_BEGIN(0, 0);
12505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12506 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12507 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12508 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12509 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12510
12511 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12512 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12513
12514 IEM_MC_PREPARE_FPU_USAGE();
12515 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
12516 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12517 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12518 } IEM_MC_ELSE() {
12519 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12520 } IEM_MC_ENDIF();
12521 IEM_MC_ADVANCE_RIP_AND_FINISH();
12522
12523 IEM_MC_END();
12524}
12525
12526
/** Opcode 0xdc 11/0 - fadd stN,st0: ST(i) += ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12533
12534
/** Opcode 0xdc 11/1 - fmul stN,st0: ST(i) *= ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12541
12542
/** Opcode 0xdc 11/4 - fsubr stN,st0: ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12549
12550
/** Opcode 0xdc 11/5 - fsub stN,st0: ST(i) -= ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12557
12558
/** Opcode 0xdc 11/6 - fdivr stN,st0: ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12565
12566
/** Opcode 0xdc 11/7 - fdiv stN,st0: ST(i) /= ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12573
12574
12575/**
12576 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
12577 * memory operand, and storing the result in ST0.
12578 *
12579 * @param bRm Mod R/M byte.
12580 * @param pfnImpl Pointer to the instruction implementation (assembly).
12581 */
12582FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
12583{
12584 IEM_MC_BEGIN(0, 0);
12585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12586 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12587 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
12588 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12589 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
12590 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
12591
12592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12594 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12595 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12596
12597 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12598 IEM_MC_PREPARE_FPU_USAGE();
12599 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
12600 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
12601 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12602 } IEM_MC_ELSE() {
12603 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12604 } IEM_MC_ENDIF();
12605 IEM_MC_ADVANCE_RIP_AND_FINISH();
12606
12607 IEM_MC_END();
12608}
12609
12610
/** Opcode 0xdc !11/0 - fadd m64r: ST(0) += [m64]. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12617
12618
/** Opcode 0xdc !11/1 - fmul m64r: ST(0) *= [m64]. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12625
12626
/** Opcode 0xdc !11/2 - fcom st0,m64r.
 * Compares ST(0) with a 64-bit real memory operand, setting C0/C2/C3 in FSW.
 * Does not pop (see iemOp_fcomp_m64r for the popping variant). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12658
12659
/** Opcode 0xdc !11/3 - fcomp st0,m64r.
 * Like FCOM m64r but pops ST(0) after the compare. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12691
12692
/** Opcode 0xdc !11/4 - fsub m64r: ST(0) -= [m64]. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12699
12700
/** Opcode 0xdc !11/5 - fsubr m64r: ST(0) = [m64] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12707
12708
/** Opcode 0xdc !11/6 - fdiv m64r: ST(0) /= [m64]. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12715
12716
/** Opcode 0xdc !11/7 - fdivr m64r: ST(0) = [m64] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12723
12724
12725/**
12726 * @opcode 0xdc
12727 */
12728FNIEMOP_DEF(iemOp_EscF4)
12729{
12730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12731 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12732 if (IEM_IS_MODRM_REG_MODE(bRm))
12733 {
12734 switch (IEM_GET_MODRM_REG_8(bRm))
12735 {
12736 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12737 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12738 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
12739 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
12740 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12741 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12742 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12743 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12745 }
12746 }
12747 else
12748 {
12749 switch (IEM_GET_MODRM_REG_8(bRm))
12750 {
12751 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12752 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12753 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12754 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12755 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12756 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12757 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12758 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12760 }
12761 }
12762}
12763
12764
/** Opcode 0xdd !11/0 - fld m64r.
 * Pushes a 64-bit real memory operand onto the FPU stack, converting it to
 * the 80-bit extended format.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the operand before touching the FPU stack, so memory faults
       leave the FPU state unmodified. */
    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top (i.e. the new top). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12795
12796
/** Opcode 0xdd !11/1 - fisttp m64i. (The old tag said !11/0; fisttp is /1.)
 * Stores ST(0) to a 64-bit signed integer using truncation (chop) rounding
 * regardless of FCW.RC, then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write integer indefinite if #IA is masked,
           otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12834
12835
/** Opcode 0xdd !11/2 - fst m64r. (The old tag said !11/0; fst m64r is /2.)
 * Stores ST(0) to a 64-bit real memory location without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12873
12874
12875
12876
/** Opcode 0xdd !11/3 - fstp m64r. (The old tag said !11/0; fstp m64r is /3.)
 * Stores ST(0) to a 64-bit real memory location, then pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12914
12915
/** Opcode 0xdd !11/4 - frstor m94/108byte. (The old tag said !11/0; frstor is /4.)
 * Restores the full FPU state from memory; deferred to the C implementation
 * since the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12934
12935
/** Opcode 0xdd !11/6 - fnsave m94/108byte. (The old tag said !11/0; fnsave is /6.)
 * Saves the full FPU state to memory and re-initializes the FPU. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12954
/** Opcode 0xdd !11/7 (see iemOp_EscF5 memory dispatch, /reg == 7).
 * FNSTSW m16 - store the FPU status word to memory without checking for
 * pending FPU exceptions (no wait prefix semantics). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Only reads FSW; the FPU state itself is not modified. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12978
12979
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given register as empty in the tag word; the value
 * and the stack top are left alone. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* Record FOP/FPUIP for the instruction even though no value is stored. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12999
13000
/** Opcode 0xdd 11/2 (see iemOp_EscF5 register dispatch, /reg == 2).
 * FST ST(i) - copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Register-to-register copy needs no rounding, so FSW stays zero. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: stack underflow, target register gets the response. */
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13023
13024
/** Opcode 0xdd 11/4 (see iemOp_EscF5 register dispatch, /reg == 4).
 * FUCOM ST(i) - unordered compare of ST(0) with ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
13031
13032
/** Opcode 0xdd 11/5 (see iemOp_EscF5 register dispatch, /reg == 5).
 * FUCOMP ST(i) - unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
13039
13040
13041/**
13042 * @opcode 0xdd
13043 */
13044FNIEMOP_DEF(iemOp_EscF5)
13045{
13046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13047 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
13048 if (IEM_IS_MODRM_REG_MODE(bRm))
13049 {
13050 switch (IEM_GET_MODRM_REG_8(bRm))
13051 {
13052 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
13053 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
13054 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
13055 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
13056 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
13057 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
13058 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
13059 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13061 }
13062 }
13063 else
13064 {
13065 switch (IEM_GET_MODRM_REG_8(bRm))
13066 {
13067 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
13068 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
13069 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
13070 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
13071 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
13072 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
13073 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
13074 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
13075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13076 }
13077 }
13078}
13079
13080
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add ST(0) to ST(i), store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
13087
13088
/** Opcode 0xde 11/1 (see iemOp_EscF6 register dispatch, /reg == 1).
 * FMULP ST(i),ST(0) - multiply ST(i) by ST(0), store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
13095
13096
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
13103
13104
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract (ST(0) - ST(i)) into ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
13111
13112
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract ST(0) from ST(i), store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
13119
13120
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide (ST(0) / ST(i)) into ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
13127
13128
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide ST(i) by ST(0), store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
13135
13136
13137/**
13138 * Common worker for FPU instructions working on ST0 and an m16i, and storing
13139 * the result in ST0.
13140 *
13141 * @param bRm Mod R/M byte.
13142 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13143 */
13144FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
13145{
13146 IEM_MC_BEGIN(0, 0);
13147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13148 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13149 IEM_MC_LOCAL(int16_t, i16Val2);
13150 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13151 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13152 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13153
13154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13156
13157 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13158 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13159 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13160
13161 IEM_MC_PREPARE_FPU_USAGE();
13162 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
13164 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
13165 } IEM_MC_ELSE() {
13166 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
13167 } IEM_MC_ENDIF();
13168 IEM_MC_ADVANCE_RIP_AND_FINISH();
13169
13170 IEM_MC_END();
13171}
13172
13173
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit signed integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
13180
13181
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST(0) by a 16-bit signed integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13188
13189
/** Opcode 0xde !11/2.
 * FICOM m16i - compare ST(0) with a 16-bit signed integer from memory;
 * only FSW condition codes are updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Record FSW plus the memory operand (FDP/FDS) for this access. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13221
13222
/** Opcode 0xde !11/3.
 * FICOMP m16i - same as FICOM m16i but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW/underflow updates below). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13254
13255
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit signed integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
13262
13263
/** Opcode 0xde !11/5.
 * FISUBR m16i - reversed subtract: ST(0) = m16i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
13270
13271
/** Opcode 0xde !11/6.
 * FIDIV m16i - divide ST(0) by a 16-bit signed integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
13278
13279
/** Opcode 0xde !11/7.
 * FIDIVR m16i - reversed divide: ST(0) = m16i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
13286
13287
13288/**
13289 * @opcode 0xde
13290 */
13291FNIEMOP_DEF(iemOp_EscF6)
13292{
13293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13294 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
13295 if (IEM_IS_MODRM_REG_MODE(bRm))
13296 {
13297 switch (IEM_GET_MODRM_REG_8(bRm))
13298 {
13299 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
13300 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
13301 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13302 case 3: if (bRm == 0xd9)
13303 return FNIEMOP_CALL(iemOp_fcompp);
13304 IEMOP_RAISE_INVALID_OPCODE_RET();
13305 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
13306 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
13307 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
13308 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
13309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13310 }
13311 }
13312 else
13313 {
13314 switch (IEM_GET_MODRM_REG_8(bRm))
13315 {
13316 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
13317 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
13318 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
13319 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
13320 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
13321 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
13322 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
13323 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
13324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13325 }
13326 }
13327}
13328
13329
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i), then increment TOP - i.e. the "pop" part of FFREEP. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13349
13350
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX without checking for pending
 * FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only access to the FPU state; only AX is written. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13365
13366
13367/** Opcode 0xdf 11/5. */
13368FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13369{
13370 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
13371 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13372 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13373 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13374}
13375
13376
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare of ST(0) with ST(i), setting
 * ZF/PF/CF in EFLAGS, then pop ST(0) (fPop flag in bit 31). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
13385
13386
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit signed integer from memory to extended real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13417
13418
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3) - store ST(0) to memory as a 16-bit signed integer
 * using truncation regardless of the rounding mode, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination up front so a failed store can still be rolled back. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on FSW (masked exceptions may suppress it). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked write the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13456
13457
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a 16-bit signed integer (rounded per
 * FCW.RC); ST(0) is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination up front so a failed store can still be rolled back. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No _THEN_POP here - FIST keeps ST(0). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked write the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13495
13496
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit signed integer (rounded per
 * FCW.RC), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination up front so a failed store can still be rolled back. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked write the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13534
13535
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * extended real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13566
13567
/** Opcode 0xdf !11/5.
 * FILD m64i - convert a 64-bit signed integer from memory to extended real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13598
13599
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value, then
 * pop ST(0). */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination up front so a failed store can still be rolled back. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked write the BCD indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13637
13638
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer (rounded per
 * FCW.RC), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination up front so a failed store can still be rolled back. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked write the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13676
13677
13678/**
13679 * @opcode 0xdf
13680 */
13681FNIEMOP_DEF(iemOp_EscF7)
13682{
13683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13684 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13685 if (IEM_IS_MODRM_REG_MODE(bRm))
13686 {
13687 switch (IEM_GET_MODRM_REG_8(bRm))
13688 {
13689 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13690 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13691 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13692 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13693 case 4: if (bRm == 0xe0)
13694 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13695 IEMOP_RAISE_INVALID_OPCODE_RET();
13696 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13697 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13698 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13700 }
13701 }
13702 else
13703 {
13704 switch (IEM_GET_MODRM_REG_8(bRm))
13705 {
13706 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13707 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13708 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13709 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13710 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13711 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13712 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13713 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13715 }
13716 }
13717}
13718
13719
13720/**
13721 * @opcode 0xe0
13722 * @opfltest zf
13723 */
13724FNIEMOP_DEF(iemOp_loopne_Jb)
13725{
13726 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13727 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13729
13730 switch (pVCpu->iem.s.enmEffAddrMode)
13731 {
13732 case IEMMODE_16BIT:
13733 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13735 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13736 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13737 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13738 } IEM_MC_ELSE() {
13739 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13740 IEM_MC_ADVANCE_RIP_AND_FINISH();
13741 } IEM_MC_ENDIF();
13742 IEM_MC_END();
13743 break;
13744
13745 case IEMMODE_32BIT:
13746 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13748 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13749 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13750 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13751 } IEM_MC_ELSE() {
13752 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13753 IEM_MC_ADVANCE_RIP_AND_FINISH();
13754 } IEM_MC_ENDIF();
13755 IEM_MC_END();
13756 break;
13757
13758 case IEMMODE_64BIT:
13759 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13761 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13762 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13764 } IEM_MC_ELSE() {
13765 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13766 IEM_MC_ADVANCE_RIP_AND_FINISH();
13767 } IEM_MC_ENDIF();
13768 IEM_MC_END();
13769 break;
13770
13771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13772 }
13773}
13774
13775
13776/**
13777 * @opcode 0xe1
13778 * @opfltest zf
13779 */
13780FNIEMOP_DEF(iemOp_loope_Jb)
13781{
13782 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13783 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13784 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13785
13786 switch (pVCpu->iem.s.enmEffAddrMode)
13787 {
13788 case IEMMODE_16BIT:
13789 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13791 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13792 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13793 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13794 } IEM_MC_ELSE() {
13795 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13796 IEM_MC_ADVANCE_RIP_AND_FINISH();
13797 } IEM_MC_ENDIF();
13798 IEM_MC_END();
13799 break;
13800
13801 case IEMMODE_32BIT:
13802 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13804 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13805 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13806 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13807 } IEM_MC_ELSE() {
13808 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13809 IEM_MC_ADVANCE_RIP_AND_FINISH();
13810 } IEM_MC_ENDIF();
13811 IEM_MC_END();
13812 break;
13813
13814 case IEMMODE_64BIT:
13815 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13817 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13818 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13819 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13820 } IEM_MC_ELSE() {
13821 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13822 IEM_MC_ADVANCE_RIP_AND_FINISH();
13823 } IEM_MC_ENDIF();
13824 IEM_MC_END();
13825 break;
13826
13827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13828 }
13829}
13830
13831
13832/**
13833 * @opcode 0xe2
13834 */
13835FNIEMOP_DEF(iemOp_loop_Jb)
13836{
13837 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13838 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13840
13841 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13842 * using the 32-bit operand size override. How can that be restarted? See
13843 * weird pseudo code in intel manual. */
13844
13845 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13846 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13847 * the loop causes guest crashes, but when logging it's nice to skip a few million
13848 * lines of useless output. */
13849#if defined(LOG_ENABLED)
13850 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13851 switch (pVCpu->iem.s.enmEffAddrMode)
13852 {
13853 case IEMMODE_16BIT:
13854 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13856 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13857 IEM_MC_ADVANCE_RIP_AND_FINISH();
13858 IEM_MC_END();
13859 break;
13860
13861 case IEMMODE_32BIT:
13862 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13864 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13865 IEM_MC_ADVANCE_RIP_AND_FINISH();
13866 IEM_MC_END();
13867 break;
13868
13869 case IEMMODE_64BIT:
13870 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13872 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13873 IEM_MC_ADVANCE_RIP_AND_FINISH();
13874 IEM_MC_END();
13875 break;
13876
13877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13878 }
13879#endif
13880
13881 switch (pVCpu->iem.s.enmEffAddrMode)
13882 {
13883 case IEMMODE_16BIT:
13884 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13886 IEM_MC_IF_CX_IS_NOT_ONE() {
13887 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13888 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13889 } IEM_MC_ELSE() {
13890 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13891 IEM_MC_ADVANCE_RIP_AND_FINISH();
13892 } IEM_MC_ENDIF();
13893 IEM_MC_END();
13894 break;
13895
13896 case IEMMODE_32BIT:
13897 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13899 IEM_MC_IF_ECX_IS_NOT_ONE() {
13900 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13901 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13902 } IEM_MC_ELSE() {
13903 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13904 IEM_MC_ADVANCE_RIP_AND_FINISH();
13905 } IEM_MC_ENDIF();
13906 IEM_MC_END();
13907 break;
13908
13909 case IEMMODE_64BIT:
13910 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13912 IEM_MC_IF_RCX_IS_NOT_ONE() {
13913 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13914 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13915 } IEM_MC_ELSE() {
13916 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13917 IEM_MC_ADVANCE_RIP_AND_FINISH();
13918 } IEM_MC_ENDIF();
13919 IEM_MC_END();
13920 break;
13921
13922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13923 }
13924}
13925
13926
13927/**
13928 * @opcode 0xe3
13929 */
13930FNIEMOP_DEF(iemOp_jecxz_Jb)
13931{
13932 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13933 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13934 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13935
13936 switch (pVCpu->iem.s.enmEffAddrMode)
13937 {
13938 case IEMMODE_16BIT:
13939 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13941 IEM_MC_IF_CX_IS_NZ() {
13942 IEM_MC_ADVANCE_RIP_AND_FINISH();
13943 } IEM_MC_ELSE() {
13944 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13945 } IEM_MC_ENDIF();
13946 IEM_MC_END();
13947 break;
13948
13949 case IEMMODE_32BIT:
13950 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13952 IEM_MC_IF_ECX_IS_NZ() {
13953 IEM_MC_ADVANCE_RIP_AND_FINISH();
13954 } IEM_MC_ELSE() {
13955 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13956 } IEM_MC_ENDIF();
13957 IEM_MC_END();
13958 break;
13959
13960 case IEMMODE_64BIT:
13961 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13963 IEM_MC_IF_RCX_IS_NZ() {
13964 IEM_MC_ADVANCE_RIP_AND_FINISH();
13965 } IEM_MC_ELSE() {
13966 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13967 } IEM_MC_ENDIF();
13968 IEM_MC_END();
13969 break;
13970
13971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13972 }
13973}
13974
13975
13976/**
13977 * @opcode 0xe4
13978 * @opfltest iopl
13979 */
13980FNIEMOP_DEF(iemOp_in_AL_Ib)
13981{
13982 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13983 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13985 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13986 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13987}
13988
13989
13990/**
13991 * @opcode 0xe5
13992 * @opfltest iopl
13993 */
13994FNIEMOP_DEF(iemOp_in_eAX_Ib)
13995{
13996 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13997 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13999 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14000 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14001 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14002}
14003
14004
14005/**
14006 * @opcode 0xe6
14007 * @opfltest iopl
14008 */
14009FNIEMOP_DEF(iemOp_out_Ib_AL)
14010{
14011 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
14012 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14014 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14015 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14016}
14017
14018
14019/**
14020 * @opcode 0xe7
14021 * @opfltest iopl
14022 */
14023FNIEMOP_DEF(iemOp_out_Ib_eAX)
14024{
14025 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
14026 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14028 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14029 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14030 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
14031}
14032
14033
14034/**
14035 * @opcode 0xe8
14036 */
14037FNIEMOP_DEF(iemOp_call_Jv)
14038{
14039 IEMOP_MNEMONIC(call_Jv, "call Jv");
14040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14041 switch (pVCpu->iem.s.enmEffOpSize)
14042 {
14043 case IEMMODE_16BIT:
14044 {
14045 IEM_MC_BEGIN(0, 0);
14046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
14047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14048 IEM_MC_REL_CALL_S16_AND_FINISH(i16Imm);
14049 IEM_MC_END();
14050 break;
14051 }
14052
14053 case IEMMODE_32BIT:
14054 {
14055 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14056 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
14057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14058 IEM_MC_REL_CALL_S32_AND_FINISH(i32Imm);
14059 IEM_MC_END();
14060 break;
14061 }
14062
14063 case IEMMODE_64BIT:
14064 {
14065 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14066 int64_t i64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64((uint64_t *)&i64Imm);
14067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14068 IEM_MC_REL_CALL_S64_AND_FINISH(i64Imm);
14069 IEM_MC_END();
14070 break;
14071 }
14072
14073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14074 }
14075}
14076
14077
14078/**
14079 * @opcode 0xe9
14080 */
14081FNIEMOP_DEF(iemOp_jmp_Jv)
14082{
14083 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
14084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14085 switch (pVCpu->iem.s.enmEffOpSize)
14086 {
14087 case IEMMODE_16BIT:
14088 IEM_MC_BEGIN(0, 0);
14089 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
14090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14091 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
14092 IEM_MC_END();
14093 break;
14094
14095 case IEMMODE_64BIT:
14096 case IEMMODE_32BIT:
14097 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14098 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
14099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14100 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
14101 IEM_MC_END();
14102 break;
14103
14104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14105 }
14106}
14107
14108
14109/**
14110 * @opcode 0xea
14111 */
14112FNIEMOP_DEF(iemOp_jmp_Ap)
14113{
14114 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
14115 IEMOP_HLP_NO_64BIT();
14116
14117 /* Decode the far pointer address and pass it on to the far call C implementation. */
14118 uint32_t off32Seg;
14119 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
14120 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
14121 else
14122 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
14123 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
14124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14125 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
14126 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
14127 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
14128 /** @todo make task-switches, ring-switches, ++ return non-zero status */
14129}
14130
14131
14132/**
14133 * @opcode 0xeb
14134 */
14135FNIEMOP_DEF(iemOp_jmp_Jb)
14136{
14137 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
14138 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
14139 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14140
14141 IEM_MC_BEGIN(0, 0);
14142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14143 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
14144 IEM_MC_END();
14145}
14146
14147
14148/**
14149 * @opcode 0xec
14150 * @opfltest iopl
14151 */
14152FNIEMOP_DEF(iemOp_in_AL_DX)
14153{
14154 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
14155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14156 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
14157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14158 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
14159}
14160
14161
14162/**
14163 * @opcode 0xed
14164 * @opfltest iopl
14165 */
14166FNIEMOP_DEF(iemOp_in_eAX_DX)
14167{
14168 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
14169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14170 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
14171 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
14172 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14173 pVCpu->iem.s.enmEffAddrMode);
14174}
14175
14176
14177/**
14178 * @opcode 0xee
14179 * @opfltest iopl
14180 */
14181FNIEMOP_DEF(iemOp_out_DX_AL)
14182{
14183 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
14184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14185 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14186 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
14187}
14188
14189
14190/**
14191 * @opcode 0xef
14192 * @opfltest iopl
14193 */
14194FNIEMOP_DEF(iemOp_out_DX_eAX)
14195{
14196 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
14197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14198 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
14199 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
14200 pVCpu->iem.s.enmEffAddrMode);
14201}
14202
14203
14204/**
14205 * @opcode 0xf0
14206 */
14207FNIEMOP_DEF(iemOp_lock)
14208{
14209 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
14210 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
14211
14212 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14213 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14214}
14215
14216
14217/**
14218 * @opcode 0xf1
14219 */
14220FNIEMOP_DEF(iemOp_int1)
14221{
14222 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
14223 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
14224 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
14225 * LOADALL memo. Needs some testing. */
14226 IEMOP_HLP_MIN_386();
14227 /** @todo testcase! */
14228 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
14229 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
14230 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
14231}
14232
14233
14234/**
14235 * @opcode 0xf2
14236 */
14237FNIEMOP_DEF(iemOp_repne)
14238{
14239 /* This overrides any previous REPE prefix. */
14240 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
14241 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
14242 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
14243
14244 /* For the 4 entry opcode tables, REPNZ overrides any previous
14245 REPZ and operand size prefixes. */
14246 pVCpu->iem.s.idxPrefix = 3;
14247
14248 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14249 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14250}
14251
14252
14253/**
14254 * @opcode 0xf3
14255 */
14256FNIEMOP_DEF(iemOp_repe)
14257{
14258 /* This overrides any previous REPNE prefix. */
14259 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
14260 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
14261 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
14262
14263 /* For the 4 entry opcode tables, REPNZ overrides any previous
14264 REPNZ and operand size prefixes. */
14265 pVCpu->iem.s.idxPrefix = 2;
14266
14267 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14268 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14269}
14270
14271
14272/**
14273 * @opcode 0xf4
14274 */
14275FNIEMOP_DEF(iemOp_hlt)
14276{
14277 IEMOP_MNEMONIC(hlt, "hlt");
14278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14279 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
14280}
14281
14282
14283/**
14284 * @opcode 0xf5
14285 * @opflmodify cf
14286 */
14287FNIEMOP_DEF(iemOp_cmc)
14288{
14289 IEMOP_MNEMONIC(cmc, "cmc");
14290 IEM_MC_BEGIN(0, 0);
14291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14292 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
14293 IEM_MC_ADVANCE_RIP_AND_FINISH();
14294 IEM_MC_END();
14295}
14296
14297
14298/**
14299 * Body for of 'inc/dec/not/neg Eb'.
14300 */
14301#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
14302 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
14303 { \
14304 /* register access */ \
14305 IEM_MC_BEGIN(0, 0); \
14306 IEMOP_HLP_DONE_DECODING(); \
14307 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14308 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14309 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
14310 IEM_MC_REF_EFLAGS(pEFlags); \
14311 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14313 IEM_MC_END(); \
14314 } \
14315 else \
14316 { \
14317 /* memory access. */ \
14318 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14319 { \
14320 IEM_MC_BEGIN(0, 0); \
14321 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14323 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14324 \
14325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14326 IEMOP_HLP_DONE_DECODING(); \
14327 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14328 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14329 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14330 \
14331 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14332 IEM_MC_COMMIT_EFLAGS(EFlags); \
14333 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14334 IEM_MC_END(); \
14335 } \
14336 else \
14337 { \
14338 IEM_MC_BEGIN(0, 0); \
14339 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14341 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14342 \
14343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14344 IEMOP_HLP_DONE_DECODING(); \
14345 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14346 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14347 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
14348 \
14349 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14350 IEM_MC_COMMIT_EFLAGS(EFlags); \
14351 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14352 IEM_MC_END(); \
14353 } \
14354 } \
14355 (void)0
14356
14357
14358/**
14359 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
14360 */
14361#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
14362 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14363 { \
14364 /* \
14365 * Register target \
14366 */ \
14367 switch (pVCpu->iem.s.enmEffOpSize) \
14368 { \
14369 case IEMMODE_16BIT: \
14370 IEM_MC_BEGIN(0, 0); \
14371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14372 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14373 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14374 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14375 IEM_MC_REF_EFLAGS(pEFlags); \
14376 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14377 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14378 IEM_MC_END(); \
14379 break; \
14380 \
14381 case IEMMODE_32BIT: \
14382 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14384 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14385 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14386 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14387 IEM_MC_REF_EFLAGS(pEFlags); \
14388 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14389 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
14390 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14391 IEM_MC_END(); \
14392 break; \
14393 \
14394 case IEMMODE_64BIT: \
14395 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14397 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14398 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14399 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14400 IEM_MC_REF_EFLAGS(pEFlags); \
14401 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14402 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14403 IEM_MC_END(); \
14404 break; \
14405 \
14406 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14407 } \
14408 } \
14409 else \
14410 { \
14411 /* \
14412 * Memory target. \
14413 */ \
14414 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14415 { \
14416 switch (pVCpu->iem.s.enmEffOpSize) \
14417 { \
14418 case IEMMODE_16BIT: \
14419 IEM_MC_BEGIN(0, 0); \
14420 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14422 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14423 \
14424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14425 IEMOP_HLP_DONE_DECODING(); \
14426 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14427 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14428 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14429 \
14430 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14431 IEM_MC_COMMIT_EFLAGS(EFlags); \
14432 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14433 IEM_MC_END(); \
14434 break; \
14435 \
14436 case IEMMODE_32BIT: \
14437 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14438 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14440 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14441 \
14442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14443 IEMOP_HLP_DONE_DECODING(); \
14444 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14446 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14447 \
14448 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14449 IEM_MC_COMMIT_EFLAGS(EFlags); \
14450 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14451 IEM_MC_END(); \
14452 break; \
14453 \
14454 case IEMMODE_64BIT: \
14455 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14456 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14458 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14459 \
14460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14461 IEMOP_HLP_DONE_DECODING(); \
14462 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14463 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14464 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14465 \
14466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14467 IEM_MC_COMMIT_EFLAGS(EFlags); \
14468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14469 IEM_MC_END(); \
14470 break; \
14471 \
14472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14473 } \
14474 } \
14475 else \
14476 { \
14477 (void)0
14478
/**
 * Body for the locked (atomic) memory variants of 'inc/dec/not/neg Ev'.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev, whose open 'else' block it
 * completes and closes (the two macros together form one if/else construct).
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14541
14542
14543/**
14544 * @opmaps grp3_f6
14545 * @opcode /0
14546 * @opflclass logical
14547 * @todo also /1
14548 */
14549FNIEMOP_DEF_1(iemOp_grp3_test_Eb_Ib, uint8_t, bRm)
14550{
14551 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14552 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14553 IEMOP_BODY_BINARY_Eb_Ib_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14554}
14555
14556
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The helper returns non-zero on a divide-error condition, in which case a
   \#DE is raised instead of advancing RIP. */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
        IEM_MC_ARG(uint8_t,         u8Value,    1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
        IEM_MC_ARG(uint8_t,         u8Value,    1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(int32_t, rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14604
14605
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   Uses the xDX:xAX register pair; the helper returns non-zero on a
   divide-error condition, in which case a \#DE is raised instead of
   advancing RIP. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,     1); \
                IEM_MC_ARG(uint16_t,        u16Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32AX,     0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,     1); \
                IEM_MC_ARG(uint32_t,        u32Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64AX,     0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,     1); \
                IEM_MC_ARG(uint64_t,        u64Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,     1); \
                IEM_MC_ARG(uint16_t,        u16Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32AX,     0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,     1); \
                IEM_MC_ARG(uint32_t,        u32Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64AX,     0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,     1); \
                IEM_MC_ARG(uint64_t,        u64Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(int32_t, rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14767
14768
14769/**
14770 * @opmaps grp3_f6
14771 * @opcode /2
14772 * @opflclass unchanged
14773 */
14774FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14775{
14776/** @todo does not modify EFLAGS. */
14777 IEMOP_MNEMONIC(not_Eb, "not Eb");
14778 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14779}
14780
14781
14782/**
14783 * @opmaps grp3_f6
14784 * @opcode /3
14785 * @opflclass arithmetic
14786 */
14787FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14788{
14789 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14790 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14791}
14792
14793
14794/**
14795 * @opcode 0xf6
14796 */
14797FNIEMOP_DEF(iemOp_Grp3_Eb)
14798{
14799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14800 switch (IEM_GET_MODRM_REG_8(bRm))
14801 {
14802 case 0:
14803 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14804 case 1:
14805 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14806 case 2:
14807 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14808 case 3:
14809 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14810 case 4:
14811 {
14812 /**
14813 * @opdone
14814 * @opmaps grp3_f6
14815 * @opcode /4
14816 * @opflclass multiply
14817 */
14818 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14819 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14820 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14821 break;
14822 }
14823 case 5:
14824 {
14825 /**
14826 * @opdone
14827 * @opmaps grp3_f6
14828 * @opcode /5
14829 * @opflclass multiply
14830 */
14831 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14832 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14833 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14834 break;
14835 }
14836 case 6:
14837 {
14838 /**
14839 * @opdone
14840 * @opmaps grp3_f6
14841 * @opcode /6
14842 * @opflclass division
14843 */
14844 IEMOP_MNEMONIC(div_Eb, "div Eb");
14845 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14846 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14847 break;
14848 }
14849 case 7:
14850 {
14851 /**
14852 * @opdone
14853 * @opmaps grp3_f6
14854 * @opcode /7
14855 * @opflclass division
14856 */
14857 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14858 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14859 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14860 break;
14861 }
14862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14863 }
14864}
14865
14866
/**
 * @opmaps grp3_f7
 * @opcode /0
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev_Iz, uint8_t, bRm)
{
    /* TEST Ev,Iz: AND without storing the result; only EFLAGS are updated. */
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    /* AF is undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only binary body (no write-back of the destination operand). */
    IEMOP_BODY_BINARY_Ev_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
14878
14879
/**
 * @opmaps grp3_f7
 * @opcode /2
 * @opflclass unchanged
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    /* NOT Ev: one's complement; second body macro emits the LOCK-prefixed
       memory variant. */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14892
14893
/**
 * @opmaps grp3_f7
 * @opcode /3
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    /* NEG Ev: two's complement negate; second body macro emits the
       LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14905
14906
/**
 * @opmaps grp3_f7
 * @opcode /5
 * @opcode /4
 * @opflclass multiply
 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    /* MUL Ev: unsigned multiply by rAX. */
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    /* SF/ZF/AF/PF are undefined after MUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14918
14919
/**
 * @opmaps grp3_f7
 * @opcode /5
 * @opflclass multiply
 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    /* IMUL Ev (one-operand form): signed multiply by rAX. */
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    /* SF/ZF/AF/PF are undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14931
14932
/**
 * @opmaps grp3_f7
 * @opcode /6
 * @opflclass division
 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    /* DIV Ev: unsigned divide of rDX:rAX. */
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    /* All arithmetic flags are undefined after DIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14944
14945
/**
 * @opmaps grp3_f7
 * @opcode /7
 * @opflclass division
 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    /* IDIV Ev: signed divide of rDX:rAX. */
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    /* All arithmetic flags are undefined after IDIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14957
14958
14959/**
14960 * @opcode 0xf7
14961 */
14962FNIEMOP_DEF(iemOp_Grp3_Ev)
14963{
14964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14965 switch (IEM_GET_MODRM_REG_8(bRm))
14966 {
14967 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14968 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14969 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14970 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14971 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14972 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14973 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14974 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14976 }
14977}
14978
14979
/**
 * @opcode 0xf8
 * @opflmodify cf
 * @opflclear cf
 */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC: clear the carry flag; no operands, no LOCK prefix allowed. */
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14994
14995
/**
 * @opcode 0xf9
 * @opflmodify cf
 * @opflset cf
 */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC: set the carry flag; no operands, no LOCK prefix allowed. */
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
15010
15011
/**
 * @opcode 0xfa
 * @opfltest iopl,vm
 * @opflmodify if,vif
 * @opflmodify if,vif
 */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI: privilege/VME checks make this non-trivial, so it is deferred to
       the C implementation.  IEM_CIMPL_F_CHECK_IRQ_BEFORE forces pending
       interrupts to be serviced before IF gets cleared. */
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
15023
15024
/**
 * @opcode 0xfb
 * @opfltest iopl,vm
 * @opflmodify if,vif
 */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI: deferred to the C implementation.  IEM_CIMPL_F_INHIBIT_SHADOW
       models the one-instruction interrupt shadow following STI, and
       IEM_CIMPL_F_CHECK_IRQ_AFTER re-checks for pending interrupts once the
       shadow expires. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
15037
15038
/**
 * @opcode 0xfc
 * @opflmodify df
 * @opflclear df
 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD: clear the direction flag (string ops ascend); no LOCK prefix. */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
15053
15054
/**
 * @opcode 0xfd
 * @opflmodify df
 * @opflset df
 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD: set the direction flag (string ops descend); no LOCK prefix. */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
15069
15070
/**
 * @opmaps grp4
 * @opcode /0
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC Eb: byte increment; the body macro handles both the plain and the
       LOCK-prefixed (memory) forms. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
15081
15082
/**
 * @opmaps grp4
 * @opcode /1
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC Eb: byte decrement; the body macro handles both the plain and the
       LOCK-prefixed (memory) forms. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
15093
15094
15095/**
15096 * @opcode 0xfe
15097 */
15098FNIEMOP_DEF(iemOp_Grp4)
15099{
15100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15101 switch (IEM_GET_MODRM_REG_8(bRm))
15102 {
15103 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
15104 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
15105 default:
15106 /** @todo is the eff-addr decoded? */
15107 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
15108 IEMOP_RAISE_INVALID_OPCODE_RET();
15109 }
15110}
15111
/**
 * @opmaps grp5
 * @opcode /0
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    /* INC Ev: word/dword/qword increment; second body macro emits the
       LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
15123
15124
/**
 * @opmaps grp5
 * @opcode /1
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    /* DEC Ev: word/dword/qword decrement; second body macro emits the
       LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
15136
15137
/**
 * Opcode 0xff /2 - near indirect call (calln Ev).
 *
 * The target RIP comes either from a register or from memory; the
 * IEM_MC_IND_CALL_UXX_AND_FINISH statements push the return address and
 * perform the branch (special MC statements so the recompiler can handle
 * them - see r104419).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* In 64-bit mode the operand size defaults to 64-bit; Intel ignores the
       operand-size prefix there. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_IND_CALL_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_IND_CALL_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_IND_CALL_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory.  Note: the effective address is
           calculated before decoding is declared done, as required. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_IND_CALL_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_IND_CALL_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_IND_CALL_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15224
/**
 * Common body for far CALL/JMP via a memory far pointer (0xff /3 and /5).
 *
 * The far pointer (offset followed by selector) is loaded from memory and
 * the actual far branch is delegated to the given C implementation.
 * Register operands are invalid and raise \#UD.
 *
 * @param   a_bRm           The ModR/M byte (must select a memory operand).
 * @param   a_fnCImpl       C implementation performing the far branch
 *                          (iemCImpl_callf or iemCImpl_FarJmp).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to pass along
 *                          (e.g. IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,  0); \
            IEM_MC_ARG(uint16_t, offSeg,  1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,  0); \
            IEM_MC_ARG(uint32_t, offSeg,  1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,  0); \
            IEM_MC_ARG(uint64_t, offSeg,  1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
15293
15294
/**
 * Opcode 0xff /3 - far indirect call (callf Ep).
 *
 * Uses the shared far-pointer body; IEM_CIMPL_F_BRANCH_STACK is passed
 * because a far call pushes a return address.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
15304
15305
/**
 * Opcode 0xff /4 - near indirect jump (jmpn Ev).
 *
 * Like calln (0xff /2) but sets RIP without pushing a return address,
 * via the IEM_MC_SET_RIP_UXX_AND_FINISH statements.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* In 64-bit mode the operand size defaults to 64-bit; Intel ignores the
       operand-size prefix there. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15392
15393
/**
 * Opcode 0xff /5 - far indirect jump (jmpf Ep).
 *
 * Uses the shared far-pointer body; no extra CIMPL flags since a far jump
 * does not touch the stack.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
15403
15404
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands are routed to the common push-GReg worker; memory
 * operands are loaded and pushed inline here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here.  In 64-bit mode the operand size defaults to
       64-bit (32-bit is not encodable). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15460
15461
15462/**
15463 * @opcode 0xff
15464 */
15465FNIEMOP_DEF(iemOp_Grp5)
15466{
15467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15468 switch (IEM_GET_MODRM_REG_8(bRm))
15469 {
15470 case 0:
15471 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15472 case 1:
15473 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15474 case 2:
15475 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15476 case 3:
15477 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15478 case 4:
15479 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15480 case 5:
15481 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15482 case 6:
15483 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15484 case 7:
15485 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15486 IEMOP_RAISE_INVALID_OPCODE_RET();
15487 }
15488 AssertFailedReturn(VERR_IEM_IPE_3);
15489}
15490
15491
15492
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte
 * (0x00..0xff).  Forward declared at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15560
15561
15562/** @} */
15563
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
Contact · Privacy policy · Terms of Use