VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103622

Last change on this file since 103622 was 103613, checked in by vboxsync, 15 months ago

VMM/IEM: Experimental code for emitting native code instead of calling AImpl helper, experimenting on: xor reg32,reg32. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 563.1 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103613 2024-02-29 13:01:56Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  The register form operates directly on the
 * destination GPR; the memory form maps the destination byte (read/write, or
 * atomically when a LOCK prefix is in effect and not disregarded) and commits
 * EFLAGS only after the memory write has been committed.
 *
 * @param   a_fnNormalU8    Assembly helper for the plain (non-locked) case.
 * @param   a_fnLockedU8    Assembly helper used when a LOCK prefix is active.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); /* renamed from bMapInfoDst for consistency with the sibling bodies */ \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Same decode structure as IEMOP_BODY_BINARY_rm_r8_RW, but the destination is
 * only read (mapped read-only, unmapped with _RO), and a LOCK prefix is
 * rejected via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET since nothing is written.
 *
 * @param   a_fnNormalU8    Assembly helper taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t,         u8Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix on a read-only destination is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Operand direction is reversed relative to IEMOP_BODY_BINARY_rm_r8_RW: the
 * destination is always the ModR/M reg-field register, so the memory form only
 * reads memory and no LOCK prefix is accepted (NO_LOCK_PREFIX on both paths).
 *
 * @param   a_fnNormalU8    Assembly helper taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! This macro deliberately ends with the LOCK-prefix 'else' branch (and
 *       its enclosing braces) still OPEN; it must be followed immediately by
 *       IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the locked variants and
 *       the closing braces.  The split works around a parsing issue in
 *       IEMAllInstPython.py (see the comment on the _LOCKED macro).
 *
 * @param   a_fnNormalU16   Assembly helper for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly helper for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly helper for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes zero the upper half of the destination GPR. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW.
 *
 * Note! This macro deliberately starts inside the 'else' block left open by
 *       IEMOP_BODY_BINARY_rm_rv_RW and supplies its body plus the closing
 *       braces; the two must always be used as a pair, in that order.
 *       Memory is mapped and committed with the _ATOMIC variants here.
 *
 * @param   a_fnLockedU16   Locked assembly helper, 16-bit operand size.
 * @param   a_fnLockedU32   Locked assembly helper, 32-bit operand size.
 * @param   a_fnLockedU64   Locked assembly helper, 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_rv_RW, but the destination is mapped
 * read-only (const pointers, _RO map/unmap), the 32-bit register case does not
 * clear the high dword (nothing is written), and a LOCK prefix is rejected.
 * This macro is self-contained; no _LOCKED continuation follows it.
 *
 * @param   a_fnNormalU16   Assembly helper for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly helper for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly helper for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only destination is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note! Unlike the r/m bodies, this macro ends with IEM_MC_END() and neither a
 *       semicolon nor '(void)0' — the invoking opcode function supplies the
 *       terminating semicolon.
 *
 * @param   a_fnNormalU8    Assembly helper taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit case decodes a dword immediate and sign-extends it
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the Iz operand form.
 *
 * @param   a_fnNormalU16       Assembly helper for the 16-bit operand size.
 * @param   a_fnNormalU32       Assembly helper for the 32-bit operand size.
 * @param   a_fnNormalU64       Assembly helper for the 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero when the destination is written; gates
 *                              the high-dword clearing in the 32-bit case
 *                              (TEST/CMP pass 0 as they leave rAX untouched).
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        /* no 'break' — presumably each IEM_MC block exits via IEM_MC_ADVANCE_RIP_AND_FINISH; confirm against the MC macro definitions */ \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb — byte add with r/m as destination; LOCK allowed (locked helper used for the memory form). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv — the two body macros pair up: _RW emits the plain paths and leaves the LOCK branch open, _LOCKED completes it. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb — register destination, so no LOCK hint and no locked helper. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev — uses IEMOP_BODY_BINARY_rv_rm (defined elsewhere in this file); trailing 1, 0 flags presumably mirror the a_fModifiesDstReg-style parameters — confirm at the macro definition. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
775
776
777/**
778 * @opcode 0x04
779 * @opgroup og_gen_arith_bin
780 * @opflclass arithmetic
781 * @opcopytests iemOp_add_Eb_Gb
782 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib — fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
788
789
790/**
791 * @opcode 0x05
792 * @opgroup og_gen_arith_bin
793 * @opflclass arithmetic
794 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
795 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
796 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
797 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
798 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz — final 1 marks the destination register as modified (32-bit form clears the high dword). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
804
805
806/**
807 * @opcode 0x06
808 * @opgroup og_stack_sreg
809 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES — invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); shares the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
816
817
818/**
819 * @opcode 0x07
820 * @opgroup og_stack_sreg
821 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES — invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The bitmask lists the guest registers touched: RSP plus the full ES
       register set (selector, base, limit, attributes) — presumably consumed
       by the native recompiler; confirm at IEM_MC_DEFER_TO_CIMPL_2_RET. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
835
836
837/**
838 * @opcode 0x08
839 * @opgroup og_gen_arith_bin
840 * @opflclass logical
841 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
842 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
843 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
845 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb — AF is declared undefined for the verifier; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
}
852
853
/**
855 * @opcode 0x09
856 * @opgroup og_gen_arith_bin
857 * @opflclass logical
858 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
859 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
860 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
861 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
862 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
863 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
864 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
865 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv — _RW/_LOCKED body macro pair; AF declared undefined for the verifier. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
873
874
875/**
876 * @opcode 0x0a
877 * @opgroup og_gen_arith_bin
878 * @opflclass logical
879 * @opcopytests iemOp_or_Eb_Gb
880 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb — register destination; AF declared undefined for the verifier. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
887
888
889/**
890 * @opcode 0x0b
891 * @opgroup og_gen_arith_bin
892 * @opflclass logical
893 * @opcopytests iemOp_or_Ev_Gv
894 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev — uses IEMOP_BODY_BINARY_rv_rm (defined elsewhere in this file). */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
902
903
904/**
905 * @opcode 0x0c
906 * @opgroup og_gen_arith_bin
907 * @opflclass logical
908 * @opcopytests iemOp_or_Eb_Gb
909 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib — fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
916
917
918/**
919 * @opcode 0x0d
920 * @opgroup og_gen_arith_bin
921 * @opflclass logical
922 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
923 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
924 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
927 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
928 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
929 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz — final 1 marks the destination register as modified (32-bit form clears the high dword). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
936
937
938/**
939 * @opcode 0x0e
940 * @opgroup og_stack_sreg
941 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode; shares the common segment-register push path. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL(iemOpCommonPushSReg, X86_SREG_CS);
}
948
949
950/**
951 * @opcode 0x0f
952 * @opmnemonic EscTwo0f
953 * @openc two0f
954 * @opdisenum OP_2B_ESC
955 * @ophints harmless
956 * @opgroup og_escapes
957 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /*
     * 0x0f escape byte: dispatch into the two-byte opcode map on 286+;
     * on pre-286 CPUs 0x0f decodes as POP CS instead (handled below).
     */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The map is indexed by opcode byte times four plus the active mandatory-prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
995
996/**
997 * @opcode 0x10
998 * @opgroup og_gen_arith_bin
999 * @opflclass arithmetic_carry
1000 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1001 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1002 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1003 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1004 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1005 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - read-modify-write destination; LOCK allowed, hence the locked helper. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1011
1012
1013/**
1014 * @opcode 0x11
1015 * @opgroup og_gen_arith_bin
1016 * @opflclass arithmetic_carry
1017 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1019 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1020 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1021 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1022 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 - plain and LOCKed bodies for all three operand sizes. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1029
1030
1031/**
1032 * @opcode 0x12
1033 * @opgroup og_gen_arith_bin
1034 * @opflclass arithmetic_carry
1035 * @opcopytests iemOp_adc_Eb_Gb
1036 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination, no LOCK variant needed. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1042
1043
1044/**
1045 * @opcode 0x13
1046 * @opgroup og_gen_arith_bin
1047 * @opflclass arithmetic_carry
1048 * @opcopytests iemOp_adc_Ev_Gv
1049 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1056
1057
1058/**
1059 * @opcode 0x14
1060 * @opgroup og_gen_arith_bin
1061 * @opflclass arithmetic_carry
1062 * @opcopytests iemOp_adc_Eb_Gb
1063 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1069
1070
1071/**
1072 * @opcode 0x15
1073 * @opgroup og_gen_arith_bin
1074 * @opflclass arithmetic_carry
1075 * @opcopytests iemOp_adc_Ev_Gv
1076 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32 - operand-size selects the AX/EAX/RAX helper. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x16
1086 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode; shares the common segment-register push path. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL(iemOpCommonPushSReg, X86_SREG_SS);
}
1093
1094
1095/**
1096 * @opcode 0x17
1097 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /*
     * POP SS - invalid in 64-bit mode.  Deferred to the C implementation with
     * IEM_CIMPL_F_INHIBIT_SHADOW (matches DISOPTYPE_INHIBIT_IRQS above) and a
     * dirty-register mask covering rSP plus the full SS hidden register state.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1111
1112
1113/**
1114 * @opcode 0x18
1115 * @opgroup og_gen_arith_bin
1116 * @opflclass arithmetic_carry
1117 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - read-modify-write destination; LOCK allowed, hence the locked helper. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1123
1124
1125/**
1126 * @opcode 0x19
1127 * @opgroup og_gen_arith_bin
1128 * @opflclass arithmetic_carry
1129 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 - plain and LOCKed bodies for all three operand sizes. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1136
1137
1138/**
1139 * @opcode 0x1a
1140 * @opgroup og_gen_arith_bin
1141 * @opflclass arithmetic_carry
1142 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1148
1149
1150/**
1151 * @opcode 0x1b
1152 * @opgroup og_gen_arith_bin
1153 * @opflclass arithmetic_carry
1154 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1161
1162
1163/**
1164 * @opcode 0x1c
1165 * @opgroup og_gen_arith_bin
1166 * @opflclass arithmetic_carry
1167 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1173
1174
1175/**
1176 * @opcode 0x1d
1177 * @opgroup og_gen_arith_bin
1178 * @opflclass arithmetic_carry
1179 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32 - operand-size selects the AX/EAX/RAX helper. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1185
1186
1187/**
1188 * @opcode 0x1e
1189 * @opgroup og_stack_sreg
1190 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; shares the common segment-register push path. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL(iemOpCommonPushSReg, X86_SREG_DS);
}
1197
1198
1199/**
1200 * @opcode 0x1f
1201 * @opgroup og_stack_sreg
1202 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /*
     * POP DS - invalid in 64-bit mode.  Deferred to the C implementation with a
     * dirty-register mask covering rSP plus the full DS hidden register state.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1216
1217
1218/**
1219 * @opcode 0x20
1220 * @opgroup og_gen_arith_bin
1221 * @opflclass logical
1222 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - read-modify-write destination; AF undefined after AND; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1229
1230
1231/**
1232 * @opcode 0x21
1233 * @opgroup og_gen_arith_bin
1234 * @opflclass logical
1235 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - plain and LOCKed bodies; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1243
1244
1245/**
1246 * @opcode 0x22
1247 * @opgroup og_gen_arith_bin
1248 * @opflclass logical
1249 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination; AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1256
1257
1258/**
1259 * @opcode 0x23
1260 * @opgroup og_gen_arith_bin
1261 * @opflclass logical
1262 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination; AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1270
1271
1272/**
1273 * @opcode 0x24
1274 * @opgroup og_gen_arith_bin
1275 * @opflclass logical
1276 */
1277FNIEMOP_DEF(iemOp_and_Al_Ib)
1278{
1279 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1281 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1282}
1283
1284
1285/**
1286 * @opcode 0x25
1287 * @opgroup og_gen_arith_bin
1288 * @opflclass logical
1289 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 - operand-size selects the AX/EAX/RAX helper; AF undefined after AND. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1296
1297
1298/**
1299 * @opcode 0x26
1300 * @opmnemonic SEG
1301 * @op1 ES
1302 * @opgroup og_prefix
1303 * @openc prefix
1304 * @opdisenum OP_SEG
1305 * @ophints harmless
1306 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and effective segment,
       then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1316
1317
1318/**
1319 * @opcode 0x27
1320 * @opfltest af,cf
1321 * @opflmodify cf,pf,af,zf,sf,of
1322 * @opflundef of
1323 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined, so the verifier ignores it.
       Deferred to the C implementation; only rAX is dirtied. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1332
1333
1334/**
1335 * @opcode 0x28
1336 * @opgroup og_gen_arith_bin
1337 * @opflclass arithmetic
1338 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - read-modify-write destination; LOCK allowed, hence the locked helper. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1344
1345
1346/**
1347 * @opcode 0x29
1348 * @opgroup og_gen_arith_bin
1349 * @opflclass arithmetic
1350 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 - plain and LOCKed bodies for all three operand sizes. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1357
1358
1359/**
1360 * @opcode 0x2a
1361 * @opgroup og_gen_arith_bin
1362 * @opflclass arithmetic
1363 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1369
1370
1371/**
1372 * @opcode 0x2b
1373 * @opgroup og_gen_arith_bin
1374 * @opflclass arithmetic
1375 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1382
1383
1384/**
1385 * @opcode 0x2c
1386 * @opgroup og_gen_arith_bin
1387 * @opflclass arithmetic
1388 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1394
1395
1396/**
1397 * @opcode 0x2d
1398 * @opgroup og_gen_arith_bin
1399 * @opflclass arithmetic
1400 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 - operand-size selects the AX/EAX/RAX helper. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1406
1407
1408/**
1409 * @opcode 0x2e
1410 * @opmnemonic SEG
1411 * @op1 CS
1412 * @opgroup og_prefix
1413 * @openc prefix
1414 * @opdisenum OP_SEG
1415 * @ophints harmless
1416 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix and effective segment,
       then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1426
1427
1428/**
1429 * @opcode 0x2f
1430 * @opfltest af,cf
1431 * @opflmodify cf,pf,af,zf,sf,of
1432 * @opflundef of
1433 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined, so the verifier ignores it.
       Deferred to the C implementation; only rAX is dirtied. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1442
1443
1444/**
1445 * @opcode 0x30
1446 * @opgroup og_gen_arith_bin
1447 * @opflclass logical
1448 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - read-modify-write destination; AF undefined after XOR; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1455
1456
1457/**
1458 * @opcode 0x31
1459 * @opgroup og_gen_arith_bin
1460 * @opflclass logical
1461 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - plain and LOCKed bodies; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1469
1470
1471/**
1472 * @opcode 0x32
1473 * @opgroup og_gen_arith_bin
1474 * @opflclass logical
1475 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination; AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1482
1483
1484/**
1485 * @opcode 0x33
1486 * @opgroup og_gen_arith_bin
1487 * @opflclass logical
1488 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /*
     * XOR r16/32/64, r/m16/32/64 - register destination; AF undefined after XOR.
     *
     * This encoder currently carries an open-coded body (instead of the usual
     * IEMOP_BODY_BINARY_rv_rm macro) so the 32-bit register-register form can
     * experiment with direct native code emission; see the markers below.
     */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* The first test checks mod == 3 and reg == rm in one go (reg/mod bits shifted down);
       the REX extension bits for both operands must match as well. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                /* Clear all status flags, then set the ones XOR-with-self yields: PF and ZF. */
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    //I E M OP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0); - restore later.
    /*
     * START TEMP EXPERIMENTAL CODE
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register-register forms.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
                IEM_MC_ARG(uint16_t,    u16Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* 32-bit is the experimental case: emit native XOR code when the
                   recompiler target supports it, otherwise fall back to the
                   assembly helper call. */
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t,    u32Src,  1);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    IEM_MC_LOCAL(uint32_t, u32Dst);
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                    /// @todo IEM_MC_LOCAL_EFLAGS(uEFlags);
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_xor_r_r_efl, u32Dst, u32Src, uEFlags, 32);
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Dst);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                    IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u32, pu32Dst, u32Src, pEFlags);
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
                IEM_MC_ARG(uint64_t,    u64Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
                IEM_MC_ARG(uint16_t,    u16Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
                IEM_MC_ARG(uint32_t,    u32Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
                IEM_MC_ARG(uint64_t,    u64Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xor_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* END TEMP EXPERIMENTAL CODE */
}
1681
1682
1683/**
1684 * @opcode 0x34
1685 * @opgroup og_gen_arith_bin
1686 * @opflclass logical
1687 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed AL destination; AF undefined after XOR. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1694
1695
1696/**
1697 * @opcode 0x35
1698 * @opgroup og_gen_arith_bin
1699 * @opflclass logical
1700 */
1701FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1702{
1703 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1704 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1705 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1706}
1707
1708
1709/**
1710 * @opcode 0x36
1711 * @opmnemonic SEG
1712 * @op1 SS
1713 * @opgroup og_prefix
1714 * @openc prefix
1715 * @opdisenum OP_SEG
1716 * @ophints harmless
1717 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix and effective segment,
       then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1727
1728
1729/**
1730 * @opcode 0x37
1731 * @opfltest af
1732 * @opflmodify cf,pf,af,zf,sf,of
1733 * @opflundef pf,zf,sf,of
1734 * @opgroup og_gen_arith_dec
1735 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1736 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1737 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1738 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1739 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1740 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1741 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1742 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1743 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1745 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1747 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1749 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1750 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1751 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1752 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1753 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1754 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1756 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1758 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1760 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1762 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1764 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1766 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined, so the verifier ignores it.
       Deferred to the C implementation; only rAX is dirtied. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1776
1777
1778/**
1779 * @opcode 0x38
1780 * @opflclass arithmetic
1781 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only destination (only EFLAGS are written). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1787
1788
1789/**
1790 * @opcode 0x39
1791 * @opflclass arithmetic
1792 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only destination (only EFLAGS are written). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1798
1799
1800/**
1801 * @opcode 0x3a
1802 * @opflclass arithmetic
1803 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - only EFLAGS are written. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1809
1810
1811/**
1812 * @opcode 0x3b
1813 * @opflclass arithmetic
1814 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - only EFLAGS are written (note the 0 "modifies dest" argument). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1821
1822
1823/**
1824 * @opcode 0x3c
1825 * @opflclass arithmetic
1826 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - only EFLAGS are written. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1832
1833
1834/**
1835 * @opcode 0x3d
1836 * @opflclass arithmetic
1837 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - only EFLAGS are written (note the 0 "modifies dest" argument). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1843
1844
1845/**
1846 * @opcode 0x3e
1847 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix and effective segment,
       then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1857
1858
1859/**
1860 * @opcode 0x3f
1861 * @opfltest af
1862 * @opflmodify cf,pf,af,zf,sf,of
1863 * @opflundef pf,zf,sf,of
1864 * @opgroup og_gen_arith_dec
1865 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1866 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1867 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1868 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1869 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1870 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1871 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1872 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1873 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1874 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1875 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1876 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1877 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1878 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1879 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1880 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1881 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1882 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1883 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1884 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1885 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1886 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1887 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1888 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1889 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1890 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1891 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1892 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1893 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1894 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1895 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1896 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1897 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1898 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1899 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1900 */
1901FNIEMOP_DEF(iemOp_aas)
1902{
1903 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1904 IEMOP_HLP_NO_64BIT();
1905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1907
1908 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1909}
1910
1911
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes, i.e. opcodes
 * 0x40 thru 0x4f in 16-bit and 32-bit mode.  Hence only 16-bit and 32-bit
 * operand sizes need to be handled; the default case asserts unreachable.
 *
 * @param   a_fnNormalU16   The 16-bit assembly worker (inc or dec).
 * @param   a_fnNormalU32   The 32-bit assembly worker (inc or dec).
 * @param   a_iReg          The general register being modified (X86_GREG_xXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1947
1948/**
1949 * @opcode 0x40
1950 * @opflclass incdec
1951 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Bare REX (0x40): no R/X/B/W bits, only the prefix flag (affects
           byte-register decoding: SPL/BPL/SIL/DIL instead of AH..BH). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1969
1970
1971/**
1972 * @opcode 0x41
1973 * @opflclass incdec
1974 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base / opcode-reg. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1993
1994
1995/**
1996 * @opcode 0x42
1997 * @opflclass incdec
1998 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
2017
2018
2019
2020/**
2021 * @opcode 0x43
2022 * @opflclass incdec
2023 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2043
2044
2045/**
2046 * @opcode 0x44
2047 * @opflclass incdec
2048 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2067
2068
2069/**
2070 * @opcode 0x45
2071 * @opflclass incdec
2072 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2092
2093
2094/**
2095 * @opcode 0x46
2096 * @opflclass incdec
2097 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2117
2118
2119/**
2120 * @opcode 0x47
2121 * @opflclass incdec
2122 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2143
2144
2145/**
2146 * @opcode 0x48
2147 * @opflclass incdec
2148 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2167
2168
2169/**
2170 * @opcode 0x49
2171 * @opflclass incdec
2172 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        iemRecalEffOpSize(pVCpu);    /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2192
2193
2194/**
2195 * @opcode 0x4a
2196 * @opflclass incdec
2197 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2217
2218
2219/**
2220 * @opcode 0x4b
2221 * @opflclass incdec
2222 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2243
2244
2245/**
2246 * @opcode 0x4c
2247 * @opflclass incdec
2248 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2268
2269
2270/**
2271 * @opcode 0x4d
2272 * @opflclass incdec
2273 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2294
2295
2296/**
2297 * @opcode 0x4e
2298 * @opflclass incdec
2299 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2320
2321
2322/**
2323 * @opcode 0x4f
2324 * @opflclass incdec
2325 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModR/M.reg. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of ModR/M.rm / SIB.base. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB.index. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2347
2348
2349/**
2350 * Common 'push register' helper.
2351 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply REX.B to the register index.  In 64-bit mode the push
           defaults to a 64-bit operand; 66h selects 16-bit - there is
           no 32-bit push in long mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2396
2397
2398/**
2399 * @opcode 0x50
2400 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2406
2407
2408/**
2409 * @opcode 0x51
2410 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2416
2417
2418/**
2419 * @opcode 0x52
2420 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2426
2427
2428/**
2429 * @opcode 0x53
2430 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2436
2437
2438/**
2439 * @opcode 0x54
2440 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later:
       it pushes the value of SP *after* the decrement, hence the explicit
       'SP - 2' below; 80186+ push the pre-decrement value (common worker). */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2457
2458
2459/**
2460 * @opcode 0x55
2461 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2467
2468
2469/**
2470 * @opcode 0x56
2471 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2477
2478
2479/**
2480 * @opcode 0x57
2481 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2487
2488
2489/**
2490 * Common 'pop register' helper.
2491 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply REX.B to the register index.  In 64-bit mode the pop
           defaults to a 64-bit operand; 66h selects 16-bit - there is
           no 32-bit pop in long mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* NOTE(review): 'pop rSP' has special semantics (SP is incremented before
       the popped value is stored) - presumably handled inside the
       IEM_MC_POP_GREG_U* macros, as no special casing is done here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2530
2531
2532/**
2533 * @opcode 0x58
2534 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2540
2541
2542/**
2543 * @opcode 0x59
2544 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2550
2551
2552/**
2553 * @opcode 0x5a
2554 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2560
2561
2562/**
2563 * @opcode 0x5b
2564 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2570
2571
2572/**
2573 * @opcode 0x5c
2574 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* 'pop rSP' loads SP with the popped value; the special increment-before-
       store ordering is presumably handled by the common worker's pop MCs. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2580
2581
2582/**
2583 * @opcode 0x5d
2584 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2590
2591
2592/**
2593 * @opcode 0x5e
2594 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2600
2601
2602/**
2603 * @opcode 0x5f
2604 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Operand size and 64-bit REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2610
2611
2612/**
2613 * @opcode 0x60
2614 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();    /* introduced with the 80186 */
    IEMOP_HLP_NO_64BIT();   /* invalid in 64-bit mode */
    /* Deferred to a C implementation; only xSP is flagged as modified since
       pusha reads all eight GPRs but writes only the stack pointer. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2625
2626
2627/**
2628 * @opcode 0x61
2629 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode, opcode 0x61 is POPA; in 64-bit mode it would be
       the MVEX prefix, which is not supported and raises #UD. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* All eight GPRs are flagged as written: seven get popped values and
           xSP is advanced (the popped SP value itself is discarded). */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2664
2665
2666/**
2667 * @opcode 0x62
2668 * @opmnemonic bound
2669 * @op1 Gv_RO
2670 * @op2 Ma
2671 * @opmincpu 80186
2672 * @ophints harmless x86_invalid_64
2673 * @optest op1=0 op2=0 ->
2674 * @optest op1=1 op2=0 -> value.xcpt=5
2675 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2676 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2677 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2678 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2679 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2680 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2681 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2682 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2683 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2684 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2685 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2686 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2687 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2688 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2689 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2690 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2691 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2692 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2693 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2694 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2695 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2696 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2697 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2698 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2699 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2700 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2701 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2702 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2703 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2704 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2705 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2706 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2707 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2708 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2709 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2710 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2711 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2712 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2713 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2714 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2715 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2716 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2717 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* Reached with MODRM.MOD == 3 only: either #UD (no AVX-512) or fall
           through to the shared EVEX prefix decoding below. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Shared EVEX prefix path: consume the remaining two payload bytes. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2805
2806
2807/**
2808 * @opcode 0x63
2809 * @opflmodify zf
2810 * @note non-64-bit modes.
2811 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* ARPL appeared with the 286. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* protected mode only */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: adjust RPL of the selector in Ew in place. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: read-modify-write mapping of the word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2859
2860
2861/**
2862 * @opcode 0x63
2863 *
2864 * @note This is a weird one. It works like a regular move instruction if
2865 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2866 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into Gv.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: sign-extending 32-bit read.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The plain-move behavior without REX.W (see function note) is not
           implemented; presumably unreachable given the 64-bit effective
           operand size asserted above implies REX.W - TODO confirm. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2908
2909
2910/**
2911 * @opcode 0x64
2912 * @opmnemonic segfs
2913 * @opmincpu 80386
2914 * @opgroup og_prefixes
2915 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the segment override and make FS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2927
2928
2929/**
2930 * @opcode 0x65
2931 * @opmnemonic seggs
2932 * @opmincpu 80386
2933 * @opgroup og_prefixes
2934 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the segment override and make GS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2946
2947
2948/**
2949 * @opcode 0x66
2950 * @opmnemonic opsize
2951 * @openc prefix
2952 * @opmincpu 80386
2953 * @ophints harmless
2954 * @opgroup og_prefixes
2955 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and re-derive the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2972
2973
2974/**
2975 * @opcode 0x67
2976 * @opmnemonic addrsize
2977 * @openc prefix
2978 * @opmincpu 80386
2979 * @ophints harmless
2980 * @opgroup og_prefixes
2981 *
 * Address-size override prefix handler.
2982 */
2983FNIEMOP_DEF(iemOp_addr_size)
2984{
2985 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2986 IEMOP_HLP_MIN_386();
2987
2988 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
 /* Toggle the effective address size relative to the default mode:
 16 <-> 32 in legacy/compat modes, 64 -> 32 in long mode. */
2989 switch (pVCpu->iem.s.enmDefAddrMode)
2990 {
2991 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2992 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2993 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2994 default: AssertFailed();
2995 }
2996
 /* A prefix does not end the instruction: fetch and dispatch the next opcode byte. */
2997 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2998 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2999}
2999
3000
3001/**
3002 * @opcode 0x68
 *
 * PUSH Iz - push an immediate of the effective operand size. In 64-bit mode
 * the default operand size is 64 bits and the 32-bit immediate is
 * sign-extended to 64 bits before being pushed.
3003 */
3004FNIEMOP_DEF(iemOp_push_Iz)
3005{
3006 IEMOP_MNEMONIC(push_Iz, "push Iz");
3007 IEMOP_HLP_MIN_186();
3008 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3009 switch (pVCpu->iem.s.enmEffOpSize)
3010 {
3011 case IEMMODE_16BIT:
3012 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3013 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
3016 IEM_MC_PUSH_U16(u16Value);
3017 IEM_MC_ADVANCE_RIP_AND_FINISH();
3018 IEM_MC_END();
3019 break;
3020
3021 case IEMMODE_32BIT:
3022 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3023 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3025 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
3026 IEM_MC_PUSH_U32(u32Value);
3027 IEM_MC_ADVANCE_RIP_AND_FINISH();
3028 IEM_MC_END();
3029 break;
3030
3031 case IEMMODE_64BIT:
 /* Immediate is a dword, sign-extended to 64 bits. */
3032 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3033 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3035 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
3036 IEM_MC_PUSH_U64(u64Value);
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 break;
3040
3041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3042 }
3043}
3044
3045
3046/**
3047 * @opcode 0x69
3048 * @opflclass multiply
 *
 * Three-operand IMUL: Gv = Ev * Iz, truncated to the effective operand size.
 * SF, ZF, AF and PF are left undefined by the hardware (declared below).
 * Each operand-size case handles register and memory source forms separately.
3049 */
3050FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3051{
3052 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3053 IEMOP_HLP_MIN_186();
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3056
3057 switch (pVCpu->iem.s.enmEffOpSize)
3058 {
3059 case IEMMODE_16BIT:
3060 {
 /* Host-CPU-specific EFLAGS behavior variant of the two-operand imul worker. */
3061 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3062 if (IEM_IS_MODRM_REG_MODE(bRm))
3063 {
3064 /* register operand */
3065 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3066 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3068 IEM_MC_LOCAL(uint16_t, u16Tmp);
3069 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3070 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3071 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
3072 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3073 IEM_MC_REF_EFLAGS(pEFlags);
3074 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3075 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3076
3077 IEM_MC_ADVANCE_RIP_AND_FINISH();
3078 IEM_MC_END();
3079 }
3080 else
3081 {
3082 /* memory operand */
3083 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3086
3087 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3089
3090 IEM_MC_LOCAL(uint16_t, u16Tmp);
3091 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3092
3093 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3094 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3095 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3096 IEM_MC_REF_EFLAGS(pEFlags);
3097 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3098 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3099
3100 IEM_MC_ADVANCE_RIP_AND_FINISH();
3101 IEM_MC_END();
3102 }
3103 break;
3104 }
3105
3106 case IEMMODE_32BIT:
3107 {
3108 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3109 if (IEM_IS_MODRM_REG_MODE(bRm))
3110 {
3111 /* register operand */
3112 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3113 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3115 IEM_MC_LOCAL(uint32_t, u32Tmp);
3116 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3117
3118 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3119 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3120 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3121 IEM_MC_REF_EFLAGS(pEFlags);
3122 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3123 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3124
3125 IEM_MC_ADVANCE_RIP_AND_FINISH();
3126 IEM_MC_END();
3127 }
3128 else
3129 {
3130 /* memory operand */
3131 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3134
3135 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137
3138 IEM_MC_LOCAL(uint32_t, u32Tmp);
3139 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3140
3141 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3142 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3144 IEM_MC_REF_EFLAGS(pEFlags);
3145 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3146 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3147
3148 IEM_MC_ADVANCE_RIP_AND_FINISH();
3149 IEM_MC_END();
3150 }
3151 break;
3152 }
3153
3154 case IEMMODE_64BIT:
3155 {
3156 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3157 if (IEM_IS_MODRM_REG_MODE(bRm))
3158 {
3159 /* register operand */
 /* The dword immediate is sign-extended to 64 bits while decoding. */
3160 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3161 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_LOCAL(uint64_t, u64Tmp);
3164 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3165
3166 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3167 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3168 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3169 IEM_MC_REF_EFLAGS(pEFlags);
3170 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3171 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3172
3173 IEM_MC_ADVANCE_RIP_AND_FINISH();
3174 IEM_MC_END();
3175 }
3176 else
3177 {
3178 /* memory operand */
3179 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3182
3183 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3185
3186 IEM_MC_LOCAL(uint64_t, u64Tmp);
3187 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3188
3189 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 /* Sign-extension of the dword immediate is done here instead. */
3190 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3191 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3192 IEM_MC_REF_EFLAGS(pEFlags);
3193 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3194 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3195
3196 IEM_MC_ADVANCE_RIP_AND_FINISH();
3197 IEM_MC_END();
3198 }
3199 break;
3200 }
3201
3202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3203 }
3204}
3205
3206
3207/**
3208 * @opcode 0x6a
 *
 * PUSH Ib - push a byte immediate sign-extended to the effective operand
 * size (16, 32 or 64 bits).
3209 */
3210FNIEMOP_DEF(iemOp_push_Ib)
3211{
3212 IEMOP_MNEMONIC(push_Ib, "push Ib");
3213 IEMOP_HLP_MIN_186();
3214 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3215 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3216
3217 switch (pVCpu->iem.s.enmEffOpSize)
3218 {
3219 case IEMMODE_16BIT:
3220 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3222 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3223 IEM_MC_PUSH_U16(uValue);
3224 IEM_MC_ADVANCE_RIP_AND_FINISH();
3225 IEM_MC_END();
3226 break;
3227 case IEMMODE_32BIT:
3228 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3231 IEM_MC_PUSH_U32(uValue);
3232 IEM_MC_ADVANCE_RIP_AND_FINISH();
3233 IEM_MC_END();
3234 break;
3235 case IEMMODE_64BIT:
3236 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3238 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3239 IEM_MC_PUSH_U64(uValue);
3240 IEM_MC_ADVANCE_RIP_AND_FINISH();
3241 IEM_MC_END();
3242 break;
3243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3244 }
3245}
3246
3247
3248/**
3249 * @opcode 0x6b
3250 * @opflclass multiply
 *
 * Three-operand IMUL with byte immediate: Gv = Ev * Ib, the immediate being
 * sign-extended to the effective operand size and the product truncated to
 * it. SF, ZF, AF and PF are left undefined by the hardware (declared below).
3251 */
3252FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3253{
3254 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
3255 IEMOP_HLP_MIN_186();
3256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3257 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3258
3259 switch (pVCpu->iem.s.enmEffOpSize)
3260 {
3261 case IEMMODE_16BIT:
3262 {
 /* Host-CPU-specific EFLAGS behavior variant of the two-operand imul worker. */
3263 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3264 if (IEM_IS_MODRM_REG_MODE(bRm))
3265 {
3266 /* register operand */
3267 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3268 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3270
3271 IEM_MC_LOCAL(uint16_t, u16Tmp);
3272 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3273
3274 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3275 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3276 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3277 IEM_MC_REF_EFLAGS(pEFlags);
3278 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3279 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3280
3281 IEM_MC_ADVANCE_RIP_AND_FINISH();
3282 IEM_MC_END();
3283 }
3284 else
3285 {
3286 /* memory operand */
3287 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3288
3289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3291
3292 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3294
3295 IEM_MC_LOCAL(uint16_t, u16Tmp);
3296 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3297
3298 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3299 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3300 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3301 IEM_MC_REF_EFLAGS(pEFlags);
3302 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3303 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3304
3305 IEM_MC_ADVANCE_RIP_AND_FINISH();
3306 IEM_MC_END();
3307 }
3308 break;
3309 }
3310
3311 case IEMMODE_32BIT:
3312 {
3313 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3314 if (IEM_IS_MODRM_REG_MODE(bRm))
3315 {
3316 /* register operand */
3317 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3318 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3322
3323 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3324 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3326 IEM_MC_REF_EFLAGS(pEFlags);
3327 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3328 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3329
3330 IEM_MC_ADVANCE_RIP_AND_FINISH();
3331 IEM_MC_END();
3332 }
3333 else
3334 {
3335 /* memory operand */
3336 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3339
3340 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3342
3343 IEM_MC_LOCAL(uint32_t, u32Tmp);
3344 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3345
3346 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3347 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3348 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3349 IEM_MC_REF_EFLAGS(pEFlags);
3350 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3351 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3352
3353 IEM_MC_ADVANCE_RIP_AND_FINISH();
3354 IEM_MC_END();
3355 }
3356 break;
3357 }
3358
3359 case IEMMODE_64BIT:
3360 {
3361 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3362 if (IEM_IS_MODRM_REG_MODE(bRm))
3363 {
3364 /* register operand */
3365 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3366 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3368 IEM_MC_LOCAL(uint64_t, u64Tmp);
3369 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3370
3371 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3372 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3373 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3374 IEM_MC_REF_EFLAGS(pEFlags);
3375 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3376 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3377
3378 IEM_MC_ADVANCE_RIP_AND_FINISH();
3379 IEM_MC_END();
3380 }
3381 else
3382 {
3383 /* memory operand */
3384 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3387
3388 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3390
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3393
3394 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 /* Sign-extension of the byte immediate is done here instead. */
3395 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3397 IEM_MC_REF_EFLAGS(pEFlags);
3398 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3399 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3400
3401 IEM_MC_ADVANCE_RIP_AND_FINISH();
3402 IEM_MC_END();
3403 }
3404 break;
3405 }
3406
3407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3408 }
3409}
3410
3411
3412/**
3413 * @opcode 0x6c
3414 * @opfltest iopl,df
 *
 * INS Yb,DX - byte string input from port DX; defers to a C implementation
 * selected by REP prefix and effective address size.
3415 */
3416FNIEMOP_DEF(iemOp_insb_Yb_DX)
3417{
3418 IEMOP_HLP_MIN_186();
3419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both REPZ and REPNZ are treated as plain REP for string I/O. */
3420 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3421 {
3422 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
 /* The rep forms read and update both xDI and xCX. */
3423 switch (pVCpu->iem.s.enmEffAddrMode)
3424 {
3425 case IEMMODE_16BIT:
3426 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3427 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3428 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3429 iemCImpl_rep_ins_op8_addr16, false);
3430 case IEMMODE_32BIT:
3431 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3432 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3433 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3434 iemCImpl_rep_ins_op8_addr32, false);
3435 case IEMMODE_64BIT:
3436 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3437 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3438 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3439 iemCImpl_rep_ins_op8_addr64, false);
3440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3441 }
3442 }
3443 else
3444 {
3445 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
 /* The single-iteration forms only touch xDI. */
3446 switch (pVCpu->iem.s.enmEffAddrMode)
3447 {
3448 case IEMMODE_16BIT:
3449 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3450 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3451 iemCImpl_ins_op8_addr16, false);
3452 case IEMMODE_32BIT:
3453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3455 iemCImpl_ins_op8_addr32, false);
3456 case IEMMODE_64BIT:
3457 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3458 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3459 iemCImpl_ins_op8_addr64, false);
3460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3461 }
3462 }
3463}
3464
3465
3466/**
3467 * @opcode 0x6d
3468 * @opfltest iopl,df
 *
 * INS Yv,DX - word/dword string input from port DX; defers to a C
 * implementation selected by REP prefix, operand size and address size.
3469 */
3470FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3471{
3472 IEMOP_HLP_MIN_186();
3473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both REPZ and REPNZ are treated as plain REP for string I/O. */
3474 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3475 {
3476 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3477 switch (pVCpu->iem.s.enmEffOpSize)
3478 {
3479 case IEMMODE_16BIT:
3480 switch (pVCpu->iem.s.enmEffAddrMode)
3481 {
3482 case IEMMODE_16BIT:
3483 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3484 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3485 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3486 iemCImpl_rep_ins_op16_addr16, false);
3487 case IEMMODE_32BIT:
3488 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3491 iemCImpl_rep_ins_op16_addr32, false);
3492 case IEMMODE_64BIT:
3493 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3494 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3496 iemCImpl_rep_ins_op16_addr64, false);
3497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3498 }
3499 break;
 /* I/O port accesses are at most 32-bit wide, so a 64-bit operand
 size is handled by the 32-bit implementations. */
3500 case IEMMODE_64BIT:
3501 case IEMMODE_32BIT:
3502 switch (pVCpu->iem.s.enmEffAddrMode)
3503 {
3504 case IEMMODE_16BIT:
3505 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3506 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3507 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3508 iemCImpl_rep_ins_op32_addr16, false);
3509 case IEMMODE_32BIT:
3510 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3511 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3512 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3513 iemCImpl_rep_ins_op32_addr32, false);
3514 case IEMMODE_64BIT:
3515 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3516 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3517 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3518 iemCImpl_rep_ins_op32_addr64, false);
3519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3520 }
3521 break;
3522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3523 }
3524 }
3525 else
3526 {
3527 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3528 switch (pVCpu->iem.s.enmEffOpSize)
3529 {
3530 case IEMMODE_16BIT:
3531 switch (pVCpu->iem.s.enmEffAddrMode)
3532 {
3533 case IEMMODE_16BIT:
3534 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3535 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3536 iemCImpl_ins_op16_addr16, false);
3537 case IEMMODE_32BIT:
3538 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3539 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3540 iemCImpl_ins_op16_addr32, false);
3541 case IEMMODE_64BIT:
3542 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3543 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3544 iemCImpl_ins_op16_addr64, false);
3545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3546 }
3547 break;
3548 case IEMMODE_64BIT:
3549 case IEMMODE_32BIT:
3550 switch (pVCpu->iem.s.enmEffAddrMode)
3551 {
3552 case IEMMODE_16BIT:
3553 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3554 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3555 iemCImpl_ins_op32_addr16, false);
3556 case IEMMODE_32BIT:
3557 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3558 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3559 iemCImpl_ins_op32_addr32, false);
3560 case IEMMODE_64BIT:
3561 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3562 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3563 iemCImpl_ins_op32_addr64, false);
3564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3565 }
3566 break;
3567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3568 }
3569 }
3570}
3571
3572
3573/**
3574 * @opcode 0x6e
3575 * @opfltest iopl,df
 *
 * OUTS DX,Yb - byte string output to port DX; defers to a C implementation
 * selected by REP prefix and effective address size. The effective segment
 * (overridable by prefix) is passed along since OUTS reads memory.
3576 */
3577FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3578{
3579 IEMOP_HLP_MIN_186();
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both REPZ and REPNZ are treated as plain REP for string I/O. */
3581 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3582 {
3583 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
 /* The rep forms read and update both xSI and xCX. */
3584 switch (pVCpu->iem.s.enmEffAddrMode)
3585 {
3586 case IEMMODE_16BIT:
3587 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3588 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3589 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3590 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3591 case IEMMODE_32BIT:
3592 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3593 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3594 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3595 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3596 case IEMMODE_64BIT:
3597 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3598 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3599 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3600 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3602 }
3603 }
3604 else
3605 {
3606 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
 /* The single-iteration forms only touch xSI. */
3607 switch (pVCpu->iem.s.enmEffAddrMode)
3608 {
3609 case IEMMODE_16BIT:
3610 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3612 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3613 case IEMMODE_32BIT:
3614 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3615 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3616 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3617 case IEMMODE_64BIT:
3618 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3619 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3620 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3622 }
3623 }
3624}
3625
3626
3627/**
3628 * @opcode 0x6f
3629 * @opfltest iopl,df
 *
 * OUTS DX,Yv - word/dword string output to port DX; defers to a C
 * implementation selected by REP prefix, operand size and address size.
 * The effective segment (overridable by prefix) is passed along.
3630 */
3631FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3632{
3633 IEMOP_HLP_MIN_186();
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both REPZ and REPNZ are treated as plain REP for string I/O. */
3635 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3636 {
3637 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3638 switch (pVCpu->iem.s.enmEffOpSize)
3639 {
3640 case IEMMODE_16BIT:
3641 switch (pVCpu->iem.s.enmEffAddrMode)
3642 {
3643 case IEMMODE_16BIT:
3644 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3645 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3646 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3647 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3648 case IEMMODE_32BIT:
3649 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3650 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3651 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3652 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3653 case IEMMODE_64BIT:
3654 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3655 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3656 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3657 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3659 }
3660 break;
 /* I/O port accesses are at most 32-bit wide, so a 64-bit operand
 size is handled by the 32-bit implementations. */
3661 case IEMMODE_64BIT:
3662 case IEMMODE_32BIT:
3663 switch (pVCpu->iem.s.enmEffAddrMode)
3664 {
3665 case IEMMODE_16BIT:
3666 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3667 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3668 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3669 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3670 case IEMMODE_32BIT:
3671 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3672 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3673 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3674 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3675 case IEMMODE_64BIT:
3676 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3677 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3679 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3681 }
3682 break;
3683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3684 }
3685 }
3686 else
3687 {
3688 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3689 switch (pVCpu->iem.s.enmEffOpSize)
3690 {
3691 case IEMMODE_16BIT:
3692 switch (pVCpu->iem.s.enmEffAddrMode)
3693 {
3694 case IEMMODE_16BIT:
3695 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3696 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3697 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3698 case IEMMODE_32BIT:
3699 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3700 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3701 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3702 case IEMMODE_64BIT:
3703 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3704 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3705 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3707 }
3708 break;
3709 case IEMMODE_64BIT:
3710 case IEMMODE_32BIT:
3711 switch (pVCpu->iem.s.enmEffAddrMode)
3712 {
3713 case IEMMODE_16BIT:
3714 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3715 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3716 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3717 case IEMMODE_32BIT:
3718 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3719 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3720 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3721 case IEMMODE_64BIT:
3722 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3723 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3724 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3726 }
3727 break;
3728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3729 }
3730 }
3731}
3732
3733
3734/**
3735 * @opcode 0x70
3736 * @opfltest of
 *
 * JO Jb - short relative jump taken when OF is set.
3737 */
3738FNIEMOP_DEF(iemOp_jo_Jb)
3739{
3740 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3741 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3742 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3743
3744 IEM_MC_BEGIN(0, 0, 0, 0);
3745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Branch on OF=1, otherwise just step past the instruction. */
3746 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3747 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3748 } IEM_MC_ELSE() {
3749 IEM_MC_ADVANCE_RIP_AND_FINISH();
3750 } IEM_MC_ENDIF();
3751 IEM_MC_END();
3752}
3753
3754
3755/**
3756 * @opcode 0x71
3757 * @opfltest of
 *
 * JNO Jb - short relative jump taken when OF is clear.
3758 */
3759FNIEMOP_DEF(iemOp_jno_Jb)
3760{
3761 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3762 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3763 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3764
3765 IEM_MC_BEGIN(0, 0, 0, 0);
3766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Inverted sense: fall through on OF=1, branch on OF=0. */
3767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3768 IEM_MC_ADVANCE_RIP_AND_FINISH();
3769 } IEM_MC_ELSE() {
3770 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3771 } IEM_MC_ENDIF();
3772 IEM_MC_END();
3773}
3774
3775/**
3776 * @opcode 0x72
3777 * @opfltest cf
 *
 * JC/JB/JNAE Jb - short relative jump taken when CF is set.
3778 */
3779FNIEMOP_DEF(iemOp_jc_Jb)
3780{
3781 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3782 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3783 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3784
3785 IEM_MC_BEGIN(0, 0, 0, 0);
3786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Branch on CF=1, otherwise just step past the instruction. */
3787 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3788 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3789 } IEM_MC_ELSE() {
3790 IEM_MC_ADVANCE_RIP_AND_FINISH();
3791 } IEM_MC_ENDIF();
3792 IEM_MC_END();
3793}
3794
3795
3796/**
3797 * @opcode 0x73
3798 * @opfltest cf
 *
 * JNC/JNB/JAE Jb - short relative jump taken when CF is clear.
3799 */
3800FNIEMOP_DEF(iemOp_jnc_Jb)
3801{
3802 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3803 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3804 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3805
3806 IEM_MC_BEGIN(0, 0, 0, 0);
3807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Inverted sense: fall through on CF=1, branch on CF=0. */
3808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3809 IEM_MC_ADVANCE_RIP_AND_FINISH();
3810 } IEM_MC_ELSE() {
3811 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3812 } IEM_MC_ENDIF();
3813 IEM_MC_END();
3814}
3815
3816
3817/**
3818 * @opcode 0x74
3819 * @opfltest zf
 *
 * JE/JZ Jb - short relative jump taken when ZF is set.
3820 */
3821FNIEMOP_DEF(iemOp_je_Jb)
3822{
3823 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3824 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3826
3827 IEM_MC_BEGIN(0, 0, 0, 0);
3828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Branch on ZF=1, otherwise just step past the instruction. */
3829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3830 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3831 } IEM_MC_ELSE() {
3832 IEM_MC_ADVANCE_RIP_AND_FINISH();
3833 } IEM_MC_ENDIF();
3834 IEM_MC_END();
3835}
3836
3837
3838/**
3839 * @opcode 0x75
3840 * @opfltest zf
 *
 * JNE/JNZ Jb - short relative jump taken when ZF is clear.
3841 */
3842FNIEMOP_DEF(iemOp_jne_Jb)
3843{
3844 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3845 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3847
3848 IEM_MC_BEGIN(0, 0, 0, 0);
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Inverted sense: fall through on ZF=1, branch on ZF=0. */
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3851 IEM_MC_ADVANCE_RIP_AND_FINISH();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856}
3857
3858
3859/**
3860 * @opcode 0x76
3861 * @opfltest cf,zf
 *
 * JBE/JNA Jb - short relative jump taken when CF or ZF is set.
3862 */
3863FNIEMOP_DEF(iemOp_jbe_Jb)
3864{
3865 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3866 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3867 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3868
3869 IEM_MC_BEGIN(0, 0, 0, 0);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Branch if either CF or ZF is set. */
3871 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3872 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3873 } IEM_MC_ELSE() {
3874 IEM_MC_ADVANCE_RIP_AND_FINISH();
3875 } IEM_MC_ENDIF();
3876 IEM_MC_END();
3877}
3878
3879
3880/**
3881 * @opcode 0x77
3882 * @opfltest cf,zf
 *
 * JA/JNBE Jb - short relative jump taken when both CF and ZF are clear.
3883 */
3884FNIEMOP_DEF(iemOp_jnbe_Jb)
3885{
3886 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3887 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3888 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3889
3890 IEM_MC_BEGIN(0, 0, 0, 0);
3891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Inverted sense: fall through if CF or ZF is set, branch otherwise. */
3892 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3893 IEM_MC_ADVANCE_RIP_AND_FINISH();
3894 } IEM_MC_ELSE() {
3895 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3896 } IEM_MC_ENDIF();
3897 IEM_MC_END();
3898}
3899
3900
3901/**
3902 * @opcode 0x78
3903 * @opfltest sf
 *
 * JS Jb - short relative jump taken when SF is set.
3904 */
3905FNIEMOP_DEF(iemOp_js_Jb)
3906{
3907 IEMOP_MNEMONIC(js_Jb, "js Jb");
3908 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3909 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3910
3911 IEM_MC_BEGIN(0, 0, 0, 0);
3912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Branch on SF=1, otherwise just step past the instruction. */
3913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3914 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3915 } IEM_MC_ELSE() {
3916 IEM_MC_ADVANCE_RIP_AND_FINISH();
3917 } IEM_MC_ENDIF();
3918 IEM_MC_END();
3919}
3920
3921
3922/**
3923 * @opcode 0x79
3924 * @opfltest sf
3925 */
3926FNIEMOP_DEF(iemOp_jns_Jb)
3927{
3928 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3929 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3930 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3931
3932 IEM_MC_BEGIN(0, 0, 0, 0);
3933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3935 IEM_MC_ADVANCE_RIP_AND_FINISH();
3936 } IEM_MC_ELSE() {
3937 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3938 } IEM_MC_ENDIF();
3939 IEM_MC_END();
3940}
3941
3942
3943/**
3944 * @opcode 0x7a
3945 * @opfltest pf
3946 */
3947FNIEMOP_DEF(iemOp_jp_Jb)
3948{
3949 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3950 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3952
3953 IEM_MC_BEGIN(0, 0, 0, 0);
3954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3955 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3956 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3957 } IEM_MC_ELSE() {
3958 IEM_MC_ADVANCE_RIP_AND_FINISH();
3959 } IEM_MC_ENDIF();
3960 IEM_MC_END();
3961}
3962
3963
3964/**
3965 * @opcode 0x7b
3966 * @opfltest pf
3967 */
3968FNIEMOP_DEF(iemOp_jnp_Jb)
3969{
3970 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3971 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3972 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3973
3974 IEM_MC_BEGIN(0, 0, 0, 0);
3975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3977 IEM_MC_ADVANCE_RIP_AND_FINISH();
3978 } IEM_MC_ELSE() {
3979 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3980 } IEM_MC_ENDIF();
3981 IEM_MC_END();
3982}
3983
3984
3985/**
3986 * @opcode 0x7c
3987 * @opfltest sf,of
3988 */
3989FNIEMOP_DEF(iemOp_jl_Jb)
3990{
3991 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3992 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3993 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3994
3995 IEM_MC_BEGIN(0, 0, 0, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3998 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3999 } IEM_MC_ELSE() {
4000 IEM_MC_ADVANCE_RIP_AND_FINISH();
4001 } IEM_MC_ENDIF();
4002 IEM_MC_END();
4003}
4004
4005
4006/**
4007 * @opcode 0x7d
4008 * @opfltest sf,of
4009 */
4010FNIEMOP_DEF(iemOp_jnl_Jb)
4011{
4012 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
4013 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4014 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4015
4016 IEM_MC_BEGIN(0, 0, 0, 0);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4019 IEM_MC_ADVANCE_RIP_AND_FINISH();
4020 } IEM_MC_ELSE() {
4021 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4022 } IEM_MC_ENDIF();
4023 IEM_MC_END();
4024}
4025
4026
4027/**
4028 * @opcode 0x7e
4029 * @opfltest zf,sf,of
4030 */
4031FNIEMOP_DEF(iemOp_jle_Jb)
4032{
4033 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
4034 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4035 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4036
4037 IEM_MC_BEGIN(0, 0, 0, 0);
4038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4039 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4040 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4041 } IEM_MC_ELSE() {
4042 IEM_MC_ADVANCE_RIP_AND_FINISH();
4043 } IEM_MC_ENDIF();
4044 IEM_MC_END();
4045}
4046
4047
4048/**
4049 * @opcode 0x7f
4050 * @opfltest zf,sf,of
4051 */
4052FNIEMOP_DEF(iemOp_jnle_Jb)
4053{
4054 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
4055 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4056 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4057
4058 IEM_MC_BEGIN(0, 0, 0, 0);
4059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4060 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4061 IEM_MC_ADVANCE_RIP_AND_FINISH();
4062 } IEM_MC_ELSE() {
4063 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4064 } IEM_MC_ENDIF();
4065 IEM_MC_END();
4066}
4067
4068
4069/**
4070 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4071 * iemOp_Grp1_Eb_Ib_80.
4072 */
4073#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
4074 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4075 { \
4076 /* register target */ \
4077 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4078 IEM_MC_BEGIN(3, 0, 0, 0); \
4079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4080 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4081 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4082 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4083 \
4084 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4085 IEM_MC_REF_EFLAGS(pEFlags); \
4086 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
4087 \
4088 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4089 IEM_MC_END(); \
4090 } \
4091 else \
4092 { \
4093 /* memory target */ \
4094 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4095 { \
4096 IEM_MC_BEGIN(3, 3, 0, 0); \
4097 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4098 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4100 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4101 \
4102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4103 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4104 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4105 IEMOP_HLP_DONE_DECODING(); \
4106 \
4107 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4108 IEM_MC_FETCH_EFLAGS(EFlags); \
4109 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
4110 \
4111 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4112 IEM_MC_COMMIT_EFLAGS(EFlags); \
4113 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4114 IEM_MC_END(); \
4115 } \
4116 else \
4117 { \
4118 (void)0
4119
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW for the LOCK-prefixed memory-target
 * case: same operation but mapping the destination atomically and calling
 * the locked worker.  Closes the braces left open by the RW body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4143
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only
 * read the destination (cmp): the memory operand is mapped read-only.
 * Deliberately brace-unbalanced like the RW body; must be completed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4190
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RO for instructions that do not allow
 * a LOCK prefix: raises the invalid-lock-prefix exception and closes the
 * braces left open by the RO body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4197
4198
4199
4200/**
4201 * @opmaps grp1_80,grp1_83
4202 * @opcode /0
4203 * @opflclass arithmetic
4204 */
4205FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4206{
4207 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4208 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4209 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4210}
4211
4212
4213/**
4214 * @opmaps grp1_80,grp1_83
4215 * @opcode /1
4216 * @opflclass logical
4217 */
4218FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4219{
4220 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4221 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4222 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4223}
4224
4225
4226/**
4227 * @opmaps grp1_80,grp1_83
4228 * @opcode /2
4229 * @opflclass arithmetic_carry
4230 */
4231FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4232{
4233 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4234 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4235 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4236}
4237
4238
4239/**
4240 * @opmaps grp1_80,grp1_83
4241 * @opcode /3
4242 * @opflclass arithmetic_carry
4243 */
4244FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4245{
4246 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4247 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4248 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4249}
4250
4251
4252/**
4253 * @opmaps grp1_80,grp1_83
4254 * @opcode /4
4255 * @opflclass logical
4256 */
4257FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4258{
4259 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4260 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4261 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4262}
4263
4264
4265/**
4266 * @opmaps grp1_80,grp1_83
4267 * @opcode /5
4268 * @opflclass arithmetic
4269 */
4270FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4271{
4272 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4273 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4274 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4275}
4276
4277
4278/**
4279 * @opmaps grp1_80,grp1_83
4280 * @opcode /6
4281 * @opflclass logical
4282 */
4283FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4284{
4285 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4286 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4287 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4288}
4289
4290
4291/**
4292 * @opmaps grp1_80,grp1_83
4293 * @opcode /7
4294 * @opflclass arithmetic
4295 */
4296FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4297{
4298 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4299 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4300 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4301}
4302
4303
4304/**
4305 * @opcode 0x80
4306 */
4307FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4308{
4309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4310 switch (IEM_GET_MODRM_REG_8(bRm))
4311 {
4312 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4313 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4314 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4315 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4316 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4317 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4318 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4319 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4321 }
4322}
4323
4324
4325/**
4326 * Body for a group 1 binary operator.
4327 */
4328#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4329 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4330 { \
4331 /* register target */ \
4332 switch (pVCpu->iem.s.enmEffOpSize) \
4333 { \
4334 case IEMMODE_16BIT: \
4335 { \
4336 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4337 IEM_MC_BEGIN(3, 0, 0, 0); \
4338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4339 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4340 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4341 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4342 \
4343 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4344 IEM_MC_REF_EFLAGS(pEFlags); \
4345 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4346 \
4347 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4348 IEM_MC_END(); \
4349 break; \
4350 } \
4351 \
4352 case IEMMODE_32BIT: \
4353 { \
4354 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4355 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4357 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4358 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4359 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4360 \
4361 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4362 IEM_MC_REF_EFLAGS(pEFlags); \
4363 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4364 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4365 \
4366 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4367 IEM_MC_END(); \
4368 break; \
4369 } \
4370 \
4371 case IEMMODE_64BIT: \
4372 { \
4373 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4374 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4376 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4377 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4378 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4379 \
4380 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4381 IEM_MC_REF_EFLAGS(pEFlags); \
4382 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4383 \
4384 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4385 IEM_MC_END(); \
4386 break; \
4387 } \
4388 \
4389 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4390 } \
4391 } \
4392 else \
4393 { \
4394 /* memory target */ \
4395 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4396 { \
4397 switch (pVCpu->iem.s.enmEffOpSize) \
4398 { \
4399 case IEMMODE_16BIT: \
4400 { \
4401 IEM_MC_BEGIN(3, 3, 0, 0); \
4402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4404 \
4405 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4406 IEMOP_HLP_DONE_DECODING(); \
4407 \
4408 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4409 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4410 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4411 \
4412 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4413 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4414 IEM_MC_FETCH_EFLAGS(EFlags); \
4415 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4416 \
4417 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4418 IEM_MC_COMMIT_EFLAGS(EFlags); \
4419 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4420 IEM_MC_END(); \
4421 break; \
4422 } \
4423 \
4424 case IEMMODE_32BIT: \
4425 { \
4426 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4429 \
4430 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4431 IEMOP_HLP_DONE_DECODING(); \
4432 \
4433 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4434 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4435 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4436 \
4437 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4438 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4439 IEM_MC_FETCH_EFLAGS(EFlags); \
4440 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4441 \
4442 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4443 IEM_MC_COMMIT_EFLAGS(EFlags); \
4444 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4445 IEM_MC_END(); \
4446 break; \
4447 } \
4448 \
4449 case IEMMODE_64BIT: \
4450 { \
4451 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4452 \
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4455 \
4456 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4457 IEMOP_HLP_DONE_DECODING(); \
4458 \
4459 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4460 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4462 \
4463 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4464 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4465 IEM_MC_FETCH_EFLAGS(EFlags); \
4466 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4467 \
4468 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4469 IEM_MC_COMMIT_EFLAGS(EFlags); \
4470 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4471 IEM_MC_END(); \
4472 break; \
4473 } \
4474 \
4475 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4476 } \
4477 } \
4478 else \
4479 { \
4480 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Completes IEMOP_BODY_BINARY_Ev_Iz_RW for the LOCK-prefixed memory-target
 * case: atomic mapping and locked workers for all three operand sizes.
 * Closes the braces left open by the RW body.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4565
/* read-only version */
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Iz_RW for workers that only
 * read the destination (cmp).  Unlike the RW body this one is fully brace
 * balanced: the LOCK-prefixed case raises the invalid-lock-prefix
 * exception inline, so no companion macro is needed.  Note the 32-bit
 * register case does not clear the high dword, as nothing is written.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4722
4723
4724/**
4725 * @opmaps grp1_81
4726 * @opcode /0
4727 * @opflclass arithmetic
4728 */
4729FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4730{
4731 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4732 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4733 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4734}
4735
4736
4737/**
4738 * @opmaps grp1_81
4739 * @opcode /1
4740 * @opflclass logical
4741 */
4742FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4743{
4744 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4745 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4746 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4747}
4748
4749
4750/**
4751 * @opmaps grp1_81
4752 * @opcode /2
4753 * @opflclass arithmetic_carry
4754 */
4755FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4756{
4757 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4758 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4759 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4760}
4761
4762
4763/**
4764 * @opmaps grp1_81
4765 * @opcode /3
4766 * @opflclass arithmetic_carry
4767 */
4768FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4769{
4770 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4771 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4772 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4773}
4774
4775
4776/**
4777 * @opmaps grp1_81
4778 * @opcode /4
4779 * @opflclass logical
4780 */
4781FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4782{
4783 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4784 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4785 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4786}
4787
4788
4789/**
4790 * @opmaps grp1_81
4791 * @opcode /5
4792 * @opflclass arithmetic
4793 */
4794FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4795{
4796 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4797 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4798 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4799}
4800
4801
4802/**
4803 * @opmaps grp1_81
4804 * @opcode /6
4805 * @opflclass logical
4806 */
4807FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4808{
4809 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4810 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4811 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4812}
4813
4814
4815/**
4816 * @opmaps grp1_81
4817 * @opcode /7
4818 * @opflclass arithmetic
4819 */
4820FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4821{
4822 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4823 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4824}
4825
4826
4827/**
4828 * @opcode 0x81
4829 */
4830FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4831{
4832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4833 switch (IEM_GET_MODRM_REG_8(bRm))
4834 {
4835 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4836 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4837 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4838 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4839 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4840 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4841 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4842 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4844 }
4845}
4846
4847
4848/**
4849 * @opcode 0x82
4850 * @opmnemonic grp1_82
4851 * @opgroup og_groups
4852 */
4853FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4854{
4855 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4856 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4857}
4858
4859
4860/**
4861 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4862 * iemOp_Grp1_Ev_Ib.
4863 */
4864#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4865 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4866 { \
4867 /* \
4868 * Register target \
4869 */ \
4870 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4871 switch (pVCpu->iem.s.enmEffOpSize) \
4872 { \
4873 case IEMMODE_16BIT: \
4874 IEM_MC_BEGIN(3, 0, 0, 0); \
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4876 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4877 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4878 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4879 \
4880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4881 IEM_MC_REF_EFLAGS(pEFlags); \
4882 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4883 \
4884 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4885 IEM_MC_END(); \
4886 break; \
4887 \
4888 case IEMMODE_32BIT: \
4889 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4891 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4892 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4893 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4894 \
4895 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4896 IEM_MC_REF_EFLAGS(pEFlags); \
4897 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4898 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4899 \
4900 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4901 IEM_MC_END(); \
4902 break; \
4903 \
4904 case IEMMODE_64BIT: \
4905 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4907 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4908 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4909 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4910 \
4911 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4912 IEM_MC_REF_EFLAGS(pEFlags); \
4913 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4914 \
4915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4916 IEM_MC_END(); \
4917 break; \
4918 \
4919 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4920 } \
4921 } \
4922 else \
4923 { \
4924 /* \
4925 * Memory target. \
4926 */ \
4927 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4928 { \
4929 switch (pVCpu->iem.s.enmEffOpSize) \
4930 { \
4931 case IEMMODE_16BIT: \
4932 IEM_MC_BEGIN(3, 3, 0, 0); \
4933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4935 \
4936 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4937 IEMOP_HLP_DONE_DECODING(); \
4938 \
4939 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4940 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4941 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4942 \
4943 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4944 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4945 IEM_MC_FETCH_EFLAGS(EFlags); \
4946 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4947 \
4948 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4949 IEM_MC_COMMIT_EFLAGS(EFlags); \
4950 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4951 IEM_MC_END(); \
4952 break; \
4953 \
4954 case IEMMODE_32BIT: \
4955 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4958 \
4959 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4960 IEMOP_HLP_DONE_DECODING(); \
4961 \
4962 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4963 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4964 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4965 \
4966 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4967 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4968 IEM_MC_FETCH_EFLAGS(EFlags); \
4969 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4970 \
4971 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4972 IEM_MC_COMMIT_EFLAGS(EFlags); \
4973 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4974 IEM_MC_END(); \
4975 break; \
4976 \
4977 case IEMMODE_64BIT: \
4978 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4981 \
4982 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4983 IEMOP_HLP_DONE_DECODING(); \
4984 \
4985 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4986 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4987 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4988 \
4989 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4990 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4991 IEM_MC_FETCH_EFLAGS(EFlags); \
4992 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4993 \
4994 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4995 IEM_MC_COMMIT_EFLAGS(EFlags); \
4996 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4997 IEM_MC_END(); \
4998 break; \
4999 \
5000 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5001 } \
5002 } \
5003 else \
5004 { \
5005 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/* Note: This macro textually completes IEMOP_BODY_BINARY_Ev_Ib_RW for the
   LOCK-prefixed memory-destination case: the destination is mapped for
   atomic access and the locked arithmetic helper is invoked.  The immediate
   byte is sign-extended to the effective operand size.  The unbalanced
   closing braces at the bottom match blocks opened by the _RW macro. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_64BIT: \
            IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
            \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    } \
    } \
    (void)0
5084
/* read-only variant */
/* Like IEMOP_BODY_BINARY_Ev_Ib_RW, but the r/m operand is only read (used by
   CMP, which updates EFLAGS without writing the destination): memory is
   mapped read-only and a LOCK prefix raises an invalid-lock-prefix fault.
   The byte immediate is sign-extended to the effective operand size. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5231
5232/**
5233 * @opmaps grp1_83
5234 * @opcode /0
5235 * @opflclass arithmetic
5236 */
5237FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5238{
5239 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5240 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5241 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5242}
5243
5244
5245/**
5246 * @opmaps grp1_83
5247 * @opcode /1
5248 * @opflclass logical
5249 */
5250FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5251{
5252 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5253 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5254 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5255}
5256
5257
5258/**
5259 * @opmaps grp1_83
5260 * @opcode /2
5261 * @opflclass arithmetic_carry
5262 */
5263FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5264{
5265 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5266 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5267 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5268}
5269
5270
5271/**
5272 * @opmaps grp1_83
5273 * @opcode /3
5274 * @opflclass arithmetic_carry
5275 */
5276FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5277{
5278 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5279 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5280 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5281}
5282
5283
5284/**
5285 * @opmaps grp1_83
5286 * @opcode /4
5287 * @opflclass logical
5288 */
5289FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5290{
5291 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5292 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5293 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5294}
5295
5296
5297/**
5298 * @opmaps grp1_83
5299 * @opcode /5
5300 * @opflclass arithmetic
5301 */
5302FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5303{
5304 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5305 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5306 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5307}
5308
5309
5310/**
5311 * @opmaps grp1_83
5312 * @opcode /6
5313 * @opflclass logical
5314 */
5315FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5316{
5317 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5318 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5319 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5320}
5321
5322
5323/**
5324 * @opmaps grp1_83
5325 * @opcode /7
5326 * @opflclass arithmetic
5327 */
5328FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5329{
5330 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5331 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5332}
5333
5334
5335/**
5336 * @opcode 0x83
5337 */
5338FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5339{
5340 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5341 to the 386 even if absent in the intel reference manuals and some
5342 3rd party opcode listings. */
5343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5344 switch (IEM_GET_MODRM_REG_8(bRm))
5345 {
5346 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5347 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5348 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5349 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5350 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5351 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5352 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5353 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5355 }
5356}
5357
5358
5359/**
5360 * @opcode 0x84
5361 * @opflclass logical
5362 */
5363FNIEMOP_DEF(iemOp_test_Eb_Gb)
5364{
5365 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5366 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5367 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5368}
5369
5370
5371/**
5372 * @opcode 0x85
5373 * @opflclass logical
5374 */
5375FNIEMOP_DEF(iemOp_test_Ev_Gv)
5376{
5377 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5378 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5379 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5380}
5381
5382
5383/**
5384 * @opcode 0x86
5385 */
5386FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5387{
5388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5389 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5390
5391 /*
5392 * If rm is denoting a register, no more instruction bytes.
5393 */
5394 if (IEM_IS_MODRM_REG_MODE(bRm))
5395 {
5396 IEM_MC_BEGIN(0, 2, 0, 0);
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 IEM_MC_LOCAL(uint8_t, uTmp1);
5399 IEM_MC_LOCAL(uint8_t, uTmp2);
5400
5401 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5402 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5403 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5404 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5405
5406 IEM_MC_ADVANCE_RIP_AND_FINISH();
5407 IEM_MC_END();
5408 }
5409 else
5410 {
5411 /*
5412 * We're accessing memory.
5413 */
5414#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5415 IEM_MC_BEGIN(2, 4, 0, 0); \
5416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5417 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5418 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5419 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5420 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5421 \
5422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5423 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5424 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5425 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5426 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5427 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5428 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5429 \
5430 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5431 IEM_MC_END()
5432
5433 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5434 {
5435 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5436 }
5437 else
5438 {
5439 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5440 }
5441 }
5442}
5443
5444
5445/**
5446 * @opcode 0x87
5447 */
5448FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5449{
5450 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5452
5453 /*
5454 * If rm is denoting a register, no more instruction bytes.
5455 */
5456 if (IEM_IS_MODRM_REG_MODE(bRm))
5457 {
5458 switch (pVCpu->iem.s.enmEffOpSize)
5459 {
5460 case IEMMODE_16BIT:
5461 IEM_MC_BEGIN(0, 2, 0, 0);
5462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5463 IEM_MC_LOCAL(uint16_t, uTmp1);
5464 IEM_MC_LOCAL(uint16_t, uTmp2);
5465
5466 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5467 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5468 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5469 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5470
5471 IEM_MC_ADVANCE_RIP_AND_FINISH();
5472 IEM_MC_END();
5473 break;
5474
5475 case IEMMODE_32BIT:
5476 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_LOCAL(uint32_t, uTmp1);
5479 IEM_MC_LOCAL(uint32_t, uTmp2);
5480
5481 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5482 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5484 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5485
5486 IEM_MC_ADVANCE_RIP_AND_FINISH();
5487 IEM_MC_END();
5488 break;
5489
5490 case IEMMODE_64BIT:
5491 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5493 IEM_MC_LOCAL(uint64_t, uTmp1);
5494 IEM_MC_LOCAL(uint64_t, uTmp2);
5495
5496 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5497 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5498 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5499 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5500
5501 IEM_MC_ADVANCE_RIP_AND_FINISH();
5502 IEM_MC_END();
5503 break;
5504
5505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5506 }
5507 }
5508 else
5509 {
5510 /*
5511 * We're accessing memory.
5512 */
5513#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5514 do { \
5515 switch (pVCpu->iem.s.enmEffOpSize) \
5516 { \
5517 case IEMMODE_16BIT: \
5518 IEM_MC_BEGIN(2, 4, 0, 0); \
5519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5520 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5521 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5522 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5523 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5524 \
5525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5526 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5527 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5528 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5529 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5530 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5531 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5532 \
5533 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5534 IEM_MC_END(); \
5535 break; \
5536 \
5537 case IEMMODE_32BIT: \
5538 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5540 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5541 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5542 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5543 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5544 \
5545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5546 IEMOP_HLP_DONE_DECODING(); \
5547 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5548 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5549 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5550 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5551 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5552 \
5553 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5554 IEM_MC_END(); \
5555 break; \
5556 \
5557 case IEMMODE_64BIT: \
5558 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5561 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5562 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5563 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5564 \
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5566 IEMOP_HLP_DONE_DECODING(); \
5567 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5568 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5569 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5570 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5571 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5572 \
5573 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5574 IEM_MC_END(); \
5575 break; \
5576 \
5577 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5578 } \
5579 } while (0)
5580 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5581 {
5582 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5583 }
5584 else
5585 {
5586 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5587 }
5588 }
5589}
5590
5591
5592/**
5593 * @opcode 0x88
5594 */
5595FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5596{
5597 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5598
5599 uint8_t bRm;
5600 IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 /*
5603 * If rm is denoting a register, no more instruction bytes.
5604 */
5605 if (IEM_IS_MODRM_REG_MODE(bRm))
5606 {
5607 IEM_MC_BEGIN(0, 1, 0, 0);
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609 IEM_MC_LOCAL(uint8_t, u8Value);
5610 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5611 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5612 IEM_MC_ADVANCE_RIP_AND_FINISH();
5613 IEM_MC_END();
5614 }
5615 else
5616 {
5617 /*
5618 * We're writing a register to memory.
5619 */
5620 IEM_MC_BEGIN(0, 2, 0, 0);
5621 IEM_MC_LOCAL(uint8_t, u8Value);
5622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5625 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5626 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5627 IEM_MC_ADVANCE_RIP_AND_FINISH();
5628 IEM_MC_END();
5629 }
5630}
5631
5632
5633/**
5634 * @opcode 0x89
5635 */
5636FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5637{
5638 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5639
5640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5641
5642 /*
5643 * If rm is denoting a register, no more instruction bytes.
5644 */
5645 if (IEM_IS_MODRM_REG_MODE(bRm))
5646 {
5647 switch (pVCpu->iem.s.enmEffOpSize)
5648 {
5649 case IEMMODE_16BIT:
5650 IEM_MC_BEGIN(0, 1, 0, 0);
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_LOCAL(uint16_t, u16Value);
5653 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5654 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5655 IEM_MC_ADVANCE_RIP_AND_FINISH();
5656 IEM_MC_END();
5657 break;
5658
5659 case IEMMODE_32BIT:
5660 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5662 IEM_MC_LOCAL(uint32_t, u32Value);
5663 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5664 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5665 IEM_MC_ADVANCE_RIP_AND_FINISH();
5666 IEM_MC_END();
5667 break;
5668
5669 case IEMMODE_64BIT:
5670 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5672 IEM_MC_LOCAL(uint64_t, u64Value);
5673 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5674 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5675 IEM_MC_ADVANCE_RIP_AND_FINISH();
5676 IEM_MC_END();
5677 break;
5678
5679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5680 }
5681 }
5682 else
5683 {
5684 /*
5685 * We're writing a register to memory.
5686 */
5687 switch (pVCpu->iem.s.enmEffOpSize)
5688 {
5689 case IEMMODE_16BIT:
5690 IEM_MC_BEGIN(0, 2, 0, 0);
5691 IEM_MC_LOCAL(uint16_t, u16Value);
5692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5695 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5696 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5697 IEM_MC_ADVANCE_RIP_AND_FINISH();
5698 IEM_MC_END();
5699 break;
5700
5701 case IEMMODE_32BIT:
5702 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5703 IEM_MC_LOCAL(uint32_t, u32Value);
5704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5708 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5709 IEM_MC_ADVANCE_RIP_AND_FINISH();
5710 IEM_MC_END();
5711 break;
5712
5713 case IEMMODE_64BIT:
5714 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5715 IEM_MC_LOCAL(uint64_t, u64Value);
5716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5720 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5721 IEM_MC_ADVANCE_RIP_AND_FINISH();
5722 IEM_MC_END();
5723 break;
5724
5725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5726 }
5727 }
5728}
5729
5730
5731/**
5732 * @opcode 0x8a
5733 */
5734FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5735{
5736 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5737
5738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5739
5740 /*
5741 * If rm is denoting a register, no more instruction bytes.
5742 */
5743 if (IEM_IS_MODRM_REG_MODE(bRm))
5744 {
5745 IEM_MC_BEGIN(0, 1, 0, 0);
5746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5747 IEM_MC_LOCAL(uint8_t, u8Value);
5748 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5749 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5750 IEM_MC_ADVANCE_RIP_AND_FINISH();
5751 IEM_MC_END();
5752 }
5753 else
5754 {
5755 /*
5756 * We're loading a register from memory.
5757 */
5758 IEM_MC_BEGIN(0, 2, 0, 0);
5759 IEM_MC_LOCAL(uint8_t, u8Value);
5760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5763 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5764 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5765 IEM_MC_ADVANCE_RIP_AND_FINISH();
5766 IEM_MC_END();
5767 }
5768}
5769
5770
5771/**
5772 * @opcode 0x8b
5773 */
5774FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5775{
5776 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5777
5778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5779
5780 /*
5781 * If rm is denoting a register, no more instruction bytes.
5782 */
5783 if (IEM_IS_MODRM_REG_MODE(bRm))
5784 {
5785 switch (pVCpu->iem.s.enmEffOpSize)
5786 {
5787 case IEMMODE_16BIT:
5788 IEM_MC_BEGIN(0, 1, 0, 0);
5789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5790 IEM_MC_LOCAL(uint16_t, u16Value);
5791 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5792 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5793 IEM_MC_ADVANCE_RIP_AND_FINISH();
5794 IEM_MC_END();
5795 break;
5796
5797 case IEMMODE_32BIT:
5798 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5800 IEM_MC_LOCAL(uint32_t, u32Value);
5801 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5802 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5803 IEM_MC_ADVANCE_RIP_AND_FINISH();
5804 IEM_MC_END();
5805 break;
5806
5807 case IEMMODE_64BIT:
5808 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5810 IEM_MC_LOCAL(uint64_t, u64Value);
5811 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5812 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5813 IEM_MC_ADVANCE_RIP_AND_FINISH();
5814 IEM_MC_END();
5815 break;
5816
5817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5818 }
5819 }
5820 else
5821 {
5822 /*
5823 * We're loading a register from memory.
5824 */
5825 switch (pVCpu->iem.s.enmEffOpSize)
5826 {
5827 case IEMMODE_16BIT:
5828 IEM_MC_BEGIN(0, 2, 0, 0);
5829 IEM_MC_LOCAL(uint16_t, u16Value);
5830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5834 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5835 IEM_MC_ADVANCE_RIP_AND_FINISH();
5836 IEM_MC_END();
5837 break;
5838
5839 case IEMMODE_32BIT:
5840 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5841 IEM_MC_LOCAL(uint32_t, u32Value);
5842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5846 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5847 IEM_MC_ADVANCE_RIP_AND_FINISH();
5848 IEM_MC_END();
5849 break;
5850
5851 case IEMMODE_64BIT:
5852 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5853 IEM_MC_LOCAL(uint64_t, u64Value);
5854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5858 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5859 IEM_MC_ADVANCE_RIP_AND_FINISH();
5860 IEM_MC_END();
5861 break;
5862
5863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5864 }
5865 }
5866}
5867
5868
5869/**
5870 * opcode 0x63
5871 * @todo Table fixme
5872 */
5873FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5874{
5875 if (!IEM_IS_64BIT_CODE(pVCpu))
5876 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5877 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5878 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5879 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5880}
5881
5882
5883/**
5884 * @opcode 0x8c
5885 */
5886FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5887{
5888 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5889
5890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5891
5892 /*
5893 * Check that the destination register exists. The REX.R prefix is ignored.
5894 */
5895 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5896 if (iSegReg > X86_SREG_GS)
5897 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5898
5899 /*
5900 * If rm is denoting a register, no more instruction bytes.
5901 * In that case, the operand size is respected and the upper bits are
5902 * cleared (starting with some pentium).
5903 */
5904 if (IEM_IS_MODRM_REG_MODE(bRm))
5905 {
5906 switch (pVCpu->iem.s.enmEffOpSize)
5907 {
5908 case IEMMODE_16BIT:
5909 IEM_MC_BEGIN(0, 1, 0, 0);
5910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5911 IEM_MC_LOCAL(uint16_t, u16Value);
5912 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5913 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5914 IEM_MC_ADVANCE_RIP_AND_FINISH();
5915 IEM_MC_END();
5916 break;
5917
5918 case IEMMODE_32BIT:
5919 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5921 IEM_MC_LOCAL(uint32_t, u32Value);
5922 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5923 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5924 IEM_MC_ADVANCE_RIP_AND_FINISH();
5925 IEM_MC_END();
5926 break;
5927
5928 case IEMMODE_64BIT:
5929 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5931 IEM_MC_LOCAL(uint64_t, u64Value);
5932 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5933 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5934 IEM_MC_ADVANCE_RIP_AND_FINISH();
5935 IEM_MC_END();
5936 break;
5937
5938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5939 }
5940 }
5941 else
5942 {
5943 /*
5944 * We're saving the register to memory. The access is word sized
5945 * regardless of operand size prefixes.
5946 */
5947#if 0 /* not necessary */
5948 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5949#endif
5950 IEM_MC_BEGIN(0, 2, 0, 0);
5951 IEM_MC_LOCAL(uint16_t, u16Value);
5952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5955 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5956 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5957 IEM_MC_ADVANCE_RIP_AND_FINISH();
5958 IEM_MC_END();
5959 }
5960}
5961
5962
5963
5964
/**
 * @opcode 0x8d
 *
 * lea Gv,M - stores the effective address of the memory operand in the
 * destination register; no memory access takes place.  The register form
 * is invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to the operand size */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to the operand size */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No truncation needed, the effective address is stored as-is. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6020
6021
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - loads a segment register from a 16-bit general register or
 * memory operand.  The actual load is done by the iemCImpl_load_SReg C
 * implementation; the decoder only selects the IEM_CIMPL_F_XXX flags.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  CS cannot be loaded with mov.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register.  This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The CIMPL call clobbers the selector, base, limit and attributes of
           the target segment register (see the kIemNativeGstReg_SegXxx mask). */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS gets the interrupt inhibit flag; only 32-bit code
               additionally needs IEM_CIMPL_F_MODE. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6137
6138
/** Opcode 0x8f /0.
 *
 * pop Ev - pops the top of the stack into a general register or memory
 * operand.  The memory form is delegated to iemCImpl_pop_memXX because
 * rSP must be incremented before the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The high byte of the 3rd argument is the rSP bias applied during
               EA calculation (the pop size) - see the #else variant below. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6270
6271
/**
 * @opcode 0x8f
 *
 * Group 1A: /0 is pop Ev; /1 thru /7 form the AMD XOP prefix (when the CPU
 * feature is present and no conflicting legacy/REX prefixes were used).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R, X, B and vvvv payload bits are stored inverted in the
               prefix bytes, hence the ~ before extracting them. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6334
6335
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the given general register (REX.B extended) with rAX using two
 * temporaries; no memory access and no flag changes are involved.
 *
 * @param   iReg    The register index (0..7 before REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* apply REX.B to reach r8-r15 */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6386
6387
6388/**
6389 * @opcode 0x90
6390 */
6391FNIEMOP_DEF(iemOp_nop)
6392{
6393 /* R8/R8D and RAX/EAX can be exchanged. */
6394 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6395 {
6396 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6397 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6398 }
6399
6400 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6401 {
6402 IEMOP_MNEMONIC(pause, "pause");
6403 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6404 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6405 if (!IEM_IS_IN_GUEST(pVCpu))
6406 { /* probable */ }
6407#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6408 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6409 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6410#endif
6411#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6412 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6413 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6414#endif
6415 }
6416 else
6417 IEMOP_MNEMONIC(nop, "nop");
6418 /** @todo testcase: lock nop; lock pause */
6419 IEM_MC_BEGIN(0, 0, 0, 0);
6420 IEMOP_HLP_DONE_DECODING();
6421 IEM_MC_ADVANCE_RIP_AND_FINISH();
6422 IEM_MC_END();
6423}
6424
6425
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6434
6435
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6444
6445
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6454
6455
6456/**
6457 * @opcode 0x94
6458 */
6459FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6460{
6461 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6462 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6463}
6464
6465
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6474
6475
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6484
6485
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6494
6495
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign extends AL/AX/EAX into AX/EAX/RAX by testing the
 * source's sign bit and then OR-ing in or AND-ing away the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6545
6546
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign extends AX/EAX/RAX into DX/EDX/RDX by testing the
 * source's sign bit and storing all-ones or zero into rDX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6596
6597
/**
 * @opcode 0x9a
 *
 * call Ap - far call with an immediate seg:offset pointer.  Invalid in
 * 64-bit mode; the heavy lifting is deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);   /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel); /* selector comes last */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6619
6620
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6632
6633
/**
 * @opcode 0x9c
 *
 * pushf Fv - defers to iemCImpl_pushf; modifies rSP (see the clobber mask).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6645
6646
/**
 * @opcode 0x9d
 *
 * popf Fv - defers to iemCImpl_popf; modifies rSP and RFLAGS, and needs
 * IRQ checks around it since it may change IF.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6659
6660
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * sahf - loads SF, ZF, AF, PF and CF from AH into the low EFLAGS byte.
 * Invalid in 64-bit mode unless CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five status flags AH may supply... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...replace the low EFLAGS byte with them (bit 1 is always set). */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6685
6686
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * lahf - stores the low EFLAGS byte into AH.  Invalid in 64-bit mode
 * unless CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6705
6706
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * The immediate offset width follows the effective address mode (16, 32 or
 * 64 bits), zero extended to 64 bits.
 * Will return/throw on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6730
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - loads AL from the moffs8 address in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6755
6756
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - loads AX/EAX/RAX from the moffs address in the effective
 * segment, operand size selecting the access width.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6810
6811
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - stores AL at the moffs8 address in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6836
6837
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - stores AX/EAX/RAX at the moffs address in the effective
 * segment, operand size selecting the access width.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6891
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-repeated MOVS step: load from iEffSeg:rSI (segment
 * overridable), store to ES:rDI, then advance or retreat both index
 * registers by the element size according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6911
6912/**
6913 * @opcode 0xa4
6914 * @opfltest df
6915 */
6916FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6917{
6918 /*
6919 * Use the C implementation if a repeat prefix is encountered.
6920 */
6921 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6922 {
6923 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6925 switch (pVCpu->iem.s.enmEffAddrMode)
6926 {
6927 case IEMMODE_16BIT:
6928 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6929 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6930 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6931 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6932 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6933 case IEMMODE_32BIT:
6934 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6935 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6936 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6937 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6938 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6939 case IEMMODE_64BIT:
6940 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6941 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6942 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6943 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6944 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6946 }
6947 }
6948
6949 /*
6950 * Sharing case implementation with movs[wdq] below.
6951 */
6952 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6953 switch (pVCpu->iem.s.enmEffAddrMode)
6954 {
6955 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6956 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6957 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6959 }
6960}
6961
6962
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * MOVSW/MOVSD/MOVSQ - copy an operand-sized item from [iEffSeg:xSI] to
 * [ES:xDI].  REP forms are deferred to C implementations selected by the
 * (operand size, address size) pair; the non-REP body is generated by
 * IEM_MOVS_CASE.  The register masks name the GPRs (xSI, xDI, xCX) the
 * deferred C code may modify.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                } /* all inner cases return, so no break is needed here */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7087
7088#undef IEM_MOVS_CASE
7089
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the microcode for one non-repeated CMPS iteration: loads ValBits
 * bits from [iEffSeg:xSI] and [ES:xDI], compares them via the cmp
 * arithmetic helper (updating EFLAGS only), then steps both index
 * registers by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    /* First operand: [iEffSeg:xSI] (segment overridable). */ \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    /* Second operand: [ES:xDI] (not overridable). */ \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    /* Compare; only EFLAGS are updated, neither memory operand is written. */ \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    /* Advance or retreat xSI/xDI by the operand size according to EFLAGS.DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa6
 * @opflclass arithmetic
 * @opfltest df
 *
 * CMPSB - compare the byte at [iEffSeg:xSI] with the byte at [ES:xDI].
 * REPE and REPNE forms are deferred to separate C implementations; the
 * register masks name the GPRs (xSI, xDI, xCX) the C code may modify, and
 * IEM_CIMPL_F_STATUS_FLAGS is set because the helpers update EFLAGS.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7198
7199
7200/**
7201 * @opcode 0xa7
7202 * @opflclass arithmetic
7203 * @opfltest df
7204 */
7205FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7206{
7207 /*
7208 * Use the C implementation if a repeat prefix is encountered.
7209 */
7210 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7211 {
7212 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7214 switch (pVCpu->iem.s.enmEffOpSize)
7215 {
7216 case IEMMODE_16BIT:
7217 switch (pVCpu->iem.s.enmEffAddrMode)
7218 {
7219 case IEMMODE_16BIT:
7220 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7221 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7222 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7223 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7224 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7225 case IEMMODE_32BIT:
7226 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7227 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7228 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7229 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7230 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7231 case IEMMODE_64BIT:
7232 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7233 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7234 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7235 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7236 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7238 }
7239 break;
7240 case IEMMODE_32BIT:
7241 switch (pVCpu->iem.s.enmEffAddrMode)
7242 {
7243 case IEMMODE_16BIT:
7244 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7245 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7246 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7247 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7248 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7249 case IEMMODE_32BIT:
7250 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7251 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7252 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7253 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7254 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7255 case IEMMODE_64BIT:
7256 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7257 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7258 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7259 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7260 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7262 }
7263 case IEMMODE_64BIT:
7264 switch (pVCpu->iem.s.enmEffAddrMode)
7265 {
7266 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7267 case IEMMODE_32BIT:
7268 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7269 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7270 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7271 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7272 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7273 case IEMMODE_64BIT:
7274 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7275 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7276 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7277 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7278 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7280 }
7281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7282 }
7283 }
7284
7285 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7286 {
7287 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7289 switch (pVCpu->iem.s.enmEffOpSize)
7290 {
7291 case IEMMODE_16BIT:
7292 switch (pVCpu->iem.s.enmEffAddrMode)
7293 {
7294 case IEMMODE_16BIT:
7295 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7296 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7297 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7298 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7299 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7300 case IEMMODE_32BIT:
7301 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7302 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7303 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7305 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7306 case IEMMODE_64BIT:
7307 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7308 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7309 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7310 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7311 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7313 }
7314 break;
7315 case IEMMODE_32BIT:
7316 switch (pVCpu->iem.s.enmEffAddrMode)
7317 {
7318 case IEMMODE_16BIT:
7319 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7320 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7321 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7323 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7324 case IEMMODE_32BIT:
7325 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7329 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7330 case IEMMODE_64BIT:
7331 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7332 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7333 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7335 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7337 }
7338 case IEMMODE_64BIT:
7339 switch (pVCpu->iem.s.enmEffAddrMode)
7340 {
7341 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7342 case IEMMODE_32BIT:
7343 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7344 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7345 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7346 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7347 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7348 case IEMMODE_64BIT:
7349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7350 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7351 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7352 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7353 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7355 }
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358 }
7359
7360 /*
7361 * Annoying double switch here.
7362 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7363 */
7364 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7365 switch (pVCpu->iem.s.enmEffOpSize)
7366 {
7367 case IEMMODE_16BIT:
7368 switch (pVCpu->iem.s.enmEffAddrMode)
7369 {
7370 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7371 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7372 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7374 }
7375 break;
7376
7377 case IEMMODE_32BIT:
7378 switch (pVCpu->iem.s.enmEffAddrMode)
7379 {
7380 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7381 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7382 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7384 }
7385 break;
7386
7387 case IEMMODE_64BIT:
7388 switch (pVCpu->iem.s.enmEffAddrMode)
7389 {
7390 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7391 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7392 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7394 }
7395 break;
7396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7397 }
7398}
7399
7400#undef IEM_CMPS_CASE
7401
/**
 * @opcode 0xa8
 * @opflclass logical
 *
 * TEST AL,Ib - ANDs AL with the immediate byte, updating EFLAGS only.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7412
7413
/**
 * @opcode 0xa9
 * @opflclass logical
 *
 * TEST rAX,Iz - ANDs AX/EAX/RAX with the immediate, updating EFLAGS only.
 * The 16/32/64-bit helper is picked by the operand size inside the body macro.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7424
7425
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-repeated STOS iteration: stores the low
 * ValBits bits of xAX to [ES:xDI] (not segment overridable), then steps
 * xDI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    /* Advance or retreat xDI by the operand size according to EFLAGS.DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7442
/**
 * @opcode 0xaa
 *
 * STOSB - store AL to [ES:xDI].  REP forms are deferred to C
 * implementations; the register masks name the GPRs (xDI, xCX) the C code
 * may modify.  No segment argument is passed since ES is fixed.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7488
7489
7490/**
7491 * @opcode 0xab
7492 */
7493FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7494{
7495 /*
7496 * Use the C implementation if a repeat prefix is encountered.
7497 */
7498 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7499 {
7500 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7502 switch (pVCpu->iem.s.enmEffOpSize)
7503 {
7504 case IEMMODE_16BIT:
7505 switch (pVCpu->iem.s.enmEffAddrMode)
7506 {
7507 case IEMMODE_16BIT:
7508 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7509 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7511 iemCImpl_stos_ax_m16);
7512 case IEMMODE_32BIT:
7513 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7516 iemCImpl_stos_ax_m32);
7517 case IEMMODE_64BIT:
7518 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7519 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7520 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7521 iemCImpl_stos_ax_m64);
7522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7523 }
7524 break;
7525 case IEMMODE_32BIT:
7526 switch (pVCpu->iem.s.enmEffAddrMode)
7527 {
7528 case IEMMODE_16BIT:
7529 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7530 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7531 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7532 iemCImpl_stos_eax_m16);
7533 case IEMMODE_32BIT:
7534 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7535 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7536 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7537 iemCImpl_stos_eax_m32);
7538 case IEMMODE_64BIT:
7539 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7541 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7542 iemCImpl_stos_eax_m64);
7543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7544 }
7545 case IEMMODE_64BIT:
7546 switch (pVCpu->iem.s.enmEffAddrMode)
7547 {
7548 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7549 case IEMMODE_32BIT:
7550 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7551 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7552 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7553 iemCImpl_stos_rax_m32);
7554 case IEMMODE_64BIT:
7555 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7556 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7558 iemCImpl_stos_rax_m64);
7559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7560 }
7561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7562 }
7563 }
7564
7565 /*
7566 * Annoying double switch here.
7567 * Using ugly macro for implementing the cases, sharing it with stosb.
7568 */
7569 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7570 switch (pVCpu->iem.s.enmEffOpSize)
7571 {
7572 case IEMMODE_16BIT:
7573 switch (pVCpu->iem.s.enmEffAddrMode)
7574 {
7575 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7576 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7577 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7579 }
7580 break;
7581
7582 case IEMMODE_32BIT:
7583 switch (pVCpu->iem.s.enmEffAddrMode)
7584 {
7585 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7586 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7587 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7589 }
7590 break;
7591
7592 case IEMMODE_64BIT:
7593 switch (pVCpu->iem.s.enmEffAddrMode)
7594 {
7595 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7596 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7597 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7599 }
7600 break;
7601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7602 }
7603}
7604
7605#undef IEM_STOS_CASE
7606
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated LODS iteration: loads ValBits
 * bits from [iEffSeg:xSI] (segment overridable) into the low part of xAX,
 * then steps xSI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    /* Advance or retreat xSI by the operand size according to EFLAGS.DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7623
/**
 * @opcode 0xac
 * @opfltest df
 *
 * LODSB - load AL from [iEffSeg:xSI].  REP forms are deferred to C
 * implementations; the register masks name the GPRs (xAX, xSI, xCX) the
 * C code may modify.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7673
7674
/**
 * @opcode 0xad
 * @opfltest df
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from [iEffSeg:xSI].  REP forms are
 * deferred to C implementations selected by the (operand size, address
 * size) pair; the register masks name the GPRs (xAX, xSI, xCX) the C code
 * may modify.  The non-REP body is generated by IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                } /* all inner cases return, so no break is needed here */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7798
7799#undef IEM_LODS_CASE
7800
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body: fetches the value at ES:xDI, compares
 * AL/AX/EAX/RAX against it via iemAImpl_cmp_uNN (which updates EFLAGS), then
 * decrements or increments xDI by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7823
7824/**
7825 * @opcode 0xae
7826 * @opflclass arithmetic
7827 * @opfltest df
7828 */
7829FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7830{
7831 /*
7832 * Use the C implementation if a repeat prefix is encountered.
7833 */
7834 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7835 {
7836 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7838 switch (pVCpu->iem.s.enmEffAddrMode)
7839 {
7840 case IEMMODE_16BIT:
7841 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7842 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7843 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7844 iemCImpl_repe_scas_al_m16);
7845 case IEMMODE_32BIT:
7846 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7847 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7848 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7849 iemCImpl_repe_scas_al_m32);
7850 case IEMMODE_64BIT:
7851 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7852 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7853 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7854 iemCImpl_repe_scas_al_m64);
7855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7856 }
7857 }
7858 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7859 {
7860 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7862 switch (pVCpu->iem.s.enmEffAddrMode)
7863 {
7864 case IEMMODE_16BIT:
7865 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7866 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7867 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7868 iemCImpl_repne_scas_al_m16);
7869 case IEMMODE_32BIT:
7870 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7871 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7872 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7873 iemCImpl_repne_scas_al_m32);
7874 case IEMMODE_64BIT:
7875 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7876 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7877 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7878 iemCImpl_repne_scas_al_m64);
7879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7880 }
7881 }
7882
7883 /*
7884 * Sharing case implementation with stos[wdq] below.
7885 */
7886 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7887 switch (pVCpu->iem.s.enmEffAddrMode)
7888 {
7889 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7890 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7891 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7893 }
7894}
7895
7896
7897/**
7898 * @opcode 0xaf
7899 * @opflclass arithmetic
7900 * @opfltest df
7901 */
7902FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7903{
7904 /*
7905 * Use the C implementation if a repeat prefix is encountered.
7906 */
7907 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7908 {
7909 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7911 switch (pVCpu->iem.s.enmEffOpSize)
7912 {
7913 case IEMMODE_16BIT:
7914 switch (pVCpu->iem.s.enmEffAddrMode)
7915 {
7916 case IEMMODE_16BIT:
7917 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7918 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7919 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7920 iemCImpl_repe_scas_ax_m16);
7921 case IEMMODE_32BIT:
7922 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7923 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7924 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7925 iemCImpl_repe_scas_ax_m32);
7926 case IEMMODE_64BIT:
7927 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7928 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7929 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7930 iemCImpl_repe_scas_ax_m64);
7931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7932 }
7933 break;
7934 case IEMMODE_32BIT:
7935 switch (pVCpu->iem.s.enmEffAddrMode)
7936 {
7937 case IEMMODE_16BIT:
7938 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7939 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7940 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7941 iemCImpl_repe_scas_eax_m16);
7942 case IEMMODE_32BIT:
7943 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7944 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7945 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7946 iemCImpl_repe_scas_eax_m32);
7947 case IEMMODE_64BIT:
7948 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7949 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7950 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7951 iemCImpl_repe_scas_eax_m64);
7952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7953 }
7954 case IEMMODE_64BIT:
7955 switch (pVCpu->iem.s.enmEffAddrMode)
7956 {
7957 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7958 case IEMMODE_32BIT:
7959 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7960 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7961 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7962 iemCImpl_repe_scas_rax_m32);
7963 case IEMMODE_64BIT:
7964 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7965 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7966 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7967 iemCImpl_repe_scas_rax_m64);
7968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7969 }
7970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7971 }
7972 }
7973 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7974 {
7975 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7977 switch (pVCpu->iem.s.enmEffOpSize)
7978 {
7979 case IEMMODE_16BIT:
7980 switch (pVCpu->iem.s.enmEffAddrMode)
7981 {
7982 case IEMMODE_16BIT:
7983 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7984 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7985 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7986 iemCImpl_repne_scas_ax_m16);
7987 case IEMMODE_32BIT:
7988 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7989 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7990 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7991 iemCImpl_repne_scas_ax_m32);
7992 case IEMMODE_64BIT:
7993 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7994 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7995 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7996 iemCImpl_repne_scas_ax_m64);
7997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7998 }
7999 break;
8000 case IEMMODE_32BIT:
8001 switch (pVCpu->iem.s.enmEffAddrMode)
8002 {
8003 case IEMMODE_16BIT:
8004 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8005 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8006 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8007 iemCImpl_repne_scas_eax_m16);
8008 case IEMMODE_32BIT:
8009 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8010 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8011 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8012 iemCImpl_repne_scas_eax_m32);
8013 case IEMMODE_64BIT:
8014 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8015 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8016 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8017 iemCImpl_repne_scas_eax_m64);
8018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8019 }
8020 case IEMMODE_64BIT:
8021 switch (pVCpu->iem.s.enmEffAddrMode)
8022 {
8023 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8024 case IEMMODE_32BIT:
8025 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8026 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8027 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8028 iemCImpl_repne_scas_rax_m32);
8029 case IEMMODE_64BIT:
8030 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8031 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8032 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8033 iemCImpl_repne_scas_rax_m64);
8034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8035 }
8036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8037 }
8038 }
8039
8040 /*
8041 * Annoying double switch here.
8042 * Using ugly macro for implementing the cases, sharing it with scasb.
8043 */
8044 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8045 switch (pVCpu->iem.s.enmEffOpSize)
8046 {
8047 case IEMMODE_16BIT:
8048 switch (pVCpu->iem.s.enmEffAddrMode)
8049 {
8050 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8051 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8052 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8054 }
8055 break;
8056
8057 case IEMMODE_32BIT:
8058 switch (pVCpu->iem.s.enmEffAddrMode)
8059 {
8060 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8061 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8062 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8064 }
8065 break;
8066
8067 case IEMMODE_64BIT:
8068 switch (pVCpu->iem.s.enmEffAddrMode)
8069 {
8070 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8071 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8072 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8074 }
8075 break;
8076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8077 }
8078}
8079
8080#undef IEM_SCAS_CASE
8081
8082/**
8083 * Common 'mov r8, imm8' helper.
8084 */
8085FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8086{
8087 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8088 IEM_MC_BEGIN(0, 0, 0, 0);
8089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8090 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8091 IEM_MC_ADVANCE_RIP_AND_FINISH();
8092 IEM_MC_END();
8093}
8094
8095
8096/**
8097 * @opcode 0xb0
8098 */
8099FNIEMOP_DEF(iemOp_mov_AL_Ib)
8100{
8101 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8102 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8103}
8104
8105
8106/**
8107 * @opcode 0xb1
8108 */
8109FNIEMOP_DEF(iemOp_CL_Ib)
8110{
8111 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8112 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8113}
8114
8115
8116/**
8117 * @opcode 0xb2
8118 */
8119FNIEMOP_DEF(iemOp_DL_Ib)
8120{
8121 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8122 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8123}
8124
8125
8126/**
8127 * @opcode 0xb3
8128 */
8129FNIEMOP_DEF(iemOp_BL_Ib)
8130{
8131 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8132 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8133}
8134
8135
8136/**
8137 * @opcode 0xb4
8138 */
8139FNIEMOP_DEF(iemOp_mov_AH_Ib)
8140{
8141 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8142 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8143}
8144
8145
8146/**
8147 * @opcode 0xb5
8148 */
8149FNIEMOP_DEF(iemOp_CH_Ib)
8150{
8151 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8152 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8153}
8154
8155
8156/**
8157 * @opcode 0xb6
8158 */
8159FNIEMOP_DEF(iemOp_DH_Ib)
8160{
8161 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8162 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8163}
8164
8165
8166/**
8167 * @opcode 0xb7
8168 */
8169FNIEMOP_DEF(iemOp_BH_Ib)
8170{
8171 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8172 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8173}
8174
8175
8176/**
8177 * Common 'mov regX,immX' helper.
8178 */
8179FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8180{
8181 switch (pVCpu->iem.s.enmEffOpSize)
8182 {
8183 case IEMMODE_16BIT:
8184 IEM_MC_BEGIN(0, 0, 0, 0);
8185 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8187 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8188 IEM_MC_ADVANCE_RIP_AND_FINISH();
8189 IEM_MC_END();
8190 break;
8191
8192 case IEMMODE_32BIT:
8193 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8194 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8196 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8197 IEM_MC_ADVANCE_RIP_AND_FINISH();
8198 IEM_MC_END();
8199 break;
8200
8201 case IEMMODE_64BIT:
8202 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8203 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8205 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8206 IEM_MC_ADVANCE_RIP_AND_FINISH();
8207 IEM_MC_END();
8208 break;
8209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8210 }
8211}
8212
8213
8214/**
8215 * @opcode 0xb8
8216 */
8217FNIEMOP_DEF(iemOp_eAX_Iv)
8218{
8219 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8220 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8221}
8222
8223
8224/**
8225 * @opcode 0xb9
8226 */
8227FNIEMOP_DEF(iemOp_eCX_Iv)
8228{
8229 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8230 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8231}
8232
8233
8234/**
8235 * @opcode 0xba
8236 */
8237FNIEMOP_DEF(iemOp_eDX_Iv)
8238{
8239 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8240 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8241}
8242
8243
8244/**
8245 * @opcode 0xbb
8246 */
8247FNIEMOP_DEF(iemOp_eBX_Iv)
8248{
8249 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8250 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8251}
8252
8253
8254/**
8255 * @opcode 0xbc
8256 */
8257FNIEMOP_DEF(iemOp_eSP_Iv)
8258{
8259 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8260 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8261}
8262
8263
8264/**
8265 * @opcode 0xbd
8266 */
8267FNIEMOP_DEF(iemOp_eBP_Iv)
8268{
8269 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8270 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8271}
8272
8273
8274/**
8275 * @opcode 0xbe
8276 */
8277FNIEMOP_DEF(iemOp_eSI_Iv)
8278{
8279 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8280 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8281}
8282
8283
8284/**
8285 * @opcode 0xbf
8286 */
8287FNIEMOP_DEF(iemOp_eDI_Iv)
8288{
8289 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8290 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8291}
8292
8293
8294/**
8295 * @opcode 0xc0
8296 */
8297FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8298{
8299 IEMOP_HLP_MIN_186();
8300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8301
8302 /* Need to use a body macro here since the EFLAGS behaviour differs between
8303 the shifts, rotates and rotate w/ carry. Sigh. */
8304#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8305 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8306 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8307 { \
8308 /* register */ \
8309 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8310 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8312 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8313 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8314 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8315 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8316 IEM_MC_REF_EFLAGS(pEFlags); \
8317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8319 IEM_MC_END(); \
8320 } \
8321 else \
8322 { \
8323 /* memory */ \
8324 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8327 \
8328 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8330 \
8331 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8332 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8333 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8334 \
8335 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8337 IEM_MC_FETCH_EFLAGS(EFlags); \
8338 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8339 \
8340 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8341 IEM_MC_COMMIT_EFLAGS(EFlags); \
8342 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8343 IEM_MC_END(); \
8344 } (void)0
8345
8346 switch (IEM_GET_MODRM_REG_8(bRm))
8347 {
8348 /**
8349 * @opdone
8350 * @opmaps grp2_c0
8351 * @opcode /0
8352 * @opflclass rotate_count
8353 */
8354 case 0:
8355 {
8356 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8357 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8358 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8359 break;
8360 }
8361 /**
8362 * @opdone
8363 * @opmaps grp2_c0
8364 * @opcode /1
8365 * @opflclass rotate_count
8366 */
8367 case 1:
8368 {
8369 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8370 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8371 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8372 break;
8373 }
8374 /**
8375 * @opdone
8376 * @opmaps grp2_c0
8377 * @opcode /2
8378 * @opflclass rotate_carry_count
8379 */
8380 case 2:
8381 {
8382 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8383 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8384 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8385 break;
8386 }
8387 /**
8388 * @opdone
8389 * @opmaps grp2_c0
8390 * @opcode /3
8391 * @opflclass rotate_carry_count
8392 */
8393 case 3:
8394 {
8395 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8397 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8398 break;
8399 }
8400 /**
8401 * @opdone
8402 * @opmaps grp2_c0
8403 * @opcode /4
8404 * @opflclass shift_count
8405 */
8406 case 4:
8407 {
8408 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8409 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8410 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8411 break;
8412 }
8413 /**
8414 * @opdone
8415 * @opmaps grp2_c0
8416 * @opcode /5
8417 * @opflclass shift_count
8418 */
8419 case 5:
8420 {
8421 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8422 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8423 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8424 break;
8425 }
8426 /**
8427 * @opdone
8428 * @opmaps grp2_c0
8429 * @opcode /7
8430 * @opflclass shift_count
8431 */
8432 case 7:
8433 {
8434 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8435 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8436 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8437 break;
8438 }
8439
8440 /** @opdone */
8441 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8442 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8443 }
8444#undef GRP2_BODY_Eb_Ib
8445}
8446
8447
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits both the register and memory forms of a group 2 'Ev,Ib' operation
   for all three effective operand sizes.  @a a_pImplExpr evaluates to the
   PCIEMOPSHIFTSIZES implementation table (rol/ror/rcl/rcr/shl/shr/sar);
   the caller is expected to have 'bRm' in scope. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8578
8579/**
8580 * @opmaps grp2_c1
8581 * @opcode /0
8582 * @opflclass rotate_count
8583 */
8584FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
8585{
8586 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8587 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8588}
8589
8590
8591/**
8592 * @opmaps grp2_c1
8593 * @opcode /1
8594 * @opflclass rotate_count
8595 */
8596FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
8597{
8598 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8599 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8600}
8601
8602
8603/**
8604 * @opmaps grp2_c1
8605 * @opcode /2
8606 * @opflclass rotate_carry_count
8607 */
8608FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
8609{
8610 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8611 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8612}
8613
8614
8615/**
8616 * @opmaps grp2_c1
8617 * @opcode /3
8618 * @opflclass rotate_carry_count
8619 */
8620FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
8621{
8622 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8623 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8624}
8625
8626
8627/**
8628 * @opmaps grp2_c1
8629 * @opcode /4
8630 * @opflclass shift_count
8631 */
8632FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
8633{
8634 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8635 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8636}
8637
8638
8639/**
8640 * @opmaps grp2_c1
8641 * @opcode /5
8642 * @opflclass shift_count
8643 */
8644FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8645{
8646 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8647 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8648}
8649
8650
8651/**
8652 * @opmaps grp2_c1
8653 * @opcode /7
8654 * @opflclass shift_count
8655 */
8656FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8657{
8658 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8659 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8660}
8661
8662#undef GRP2_BODY_Ev_Ib
8663
8664/**
8665 * @opcode 0xc1
8666 */
8667FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8668{
8669 IEMOP_HLP_MIN_186();
8670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8671
8672 switch (IEM_GET_MODRM_REG_8(bRm))
8673 {
8674 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
8675 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
8676 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
8677 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
8678 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
8679 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
8680 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
8681 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8682 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8683 }
8684}
8685
8686
8687/**
8688 * @opcode 0xc2
8689 */
8690FNIEMOP_DEF(iemOp_retn_Iw)
8691{
8692 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8693 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8694 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 switch (pVCpu->iem.s.enmEffOpSize)
8697 {
8698 case IEMMODE_16BIT:
8699 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8700 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8701 case IEMMODE_32BIT:
8702 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8703 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8704 case IEMMODE_64BIT:
8705 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8706 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8708 }
8709}
8710
8711
8712/**
8713 * @opcode 0xc3
8714 */
8715FNIEMOP_DEF(iemOp_retn)
8716{
8717 IEMOP_MNEMONIC(retn, "retn");
8718 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 switch (pVCpu->iem.s.enmEffOpSize)
8721 {
8722 case IEMMODE_16BIT:
8723 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8724 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8725 case IEMMODE_32BIT:
8726 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8727 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8728 case IEMMODE_64BIT:
8729 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8730 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8732 }
8733}
8734
8735
/**
 * @opcode 0xc4
 *
 * Double-duty opcode: LES Gv,Mp in 16/32-bit code, the 3-byte VEX prefix
 * in 64-bit code or whenever the ModR/M byte has MOD=3.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       (Note: the original comment said "LDS" here; this is the 0xc4 = LES
       handler - the two comments were swapped with the 0xc5 handler.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                /* Branchless variant: shift VEX.W and the inverted VEX.R/X/B
                   bits straight into their IEM_OP_PRF_* bit positions; the
                   AssertCompiles pin those positions down. */
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                /* NOTE(review): this dead branch uses IEM_OP_PRF_SIZE_REX_B/X/R,
                   whereas the live branch above asserts IEM_OP_PRF_REX_B/X/R -
                   verify the macro names before ever enabling this path. */
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;  /* VEX.~R (inverted) */
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;  /* VEX.~X (inverted) */
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;  /* VEX.~B (inverted) */
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;      /* VEX.~vvvv (inverted) */
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;         /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;              /* VEX.pp */

            /* Low five bits of the first VEX payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    /* NOTE(review): the log message says 'vvvv' but bRm & 0x1f is
                       the m-mmmm map-select field; message text left untouched. */
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8821
8822
/**
 * @opcode 0xc5
 *
 * Double-duty opcode: LDS Gv,Mp in 16/32-bit code, the 2-byte VEX prefix
 * in 64-bit code or whenever the ModR/M byte has MOD=3.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       (Note: the original comment said "LES" here; this is the 0xc5 = LDS
       handler - the two comments were swapped with the 0xc4 handler.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            /* The single VEX payload byte packs ~R, ~vvvv, L and pp. */
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;  /* VEX.~R (inverted) */
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;        /* VEX.~vvvv (inverted) */
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;           /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;                /* VEX.pp */

            /* The 2-byte form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8868
8869
/**
 * @opcode 0xc6
 *
 * Group 11: only /0 is defined (MOV Eb,Ib); all other /reg encodings raise
 * an invalid-opcode exception.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The 3rd arg is the number of immediate bytes still to be fetched
           after the addressing bytes (1 here: the Ib operand). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8903
8904
/**
 * @opcode 0xc7
 *
 * Group 11: only /0 is defined (MOV Ev,Iz); all other /reg encodings raise
 * an invalid-opcode exception.  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The 3rd IEM_MC_CALC_RM_EFF_ADDR argument is the
           number of immediate bytes that still follow (2/4/4 - the 64-bit
           case also fetches only a 4-byte immediate). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8992
8993
8994
8995
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - build a stack frame.  Decodes the 16-bit frame size and the
 * 8-bit nesting level, then defers the actual frame construction to
 * iemCImpl_enter.  Both xSP and xBP are in the modified-register mask since
 * the helper adjusts them.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                    /* Instruction introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
9012
9013
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame set up by ENTER.  Defers to
 * iemCImpl_leave; xSP and xBP are both in the modified-register mask.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                    /* Instruction introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
9028
9029
/**
 * @opcode 0xca
 *
 * RETF Iw - far return, popping an extra Iw bytes off the stack.  A far
 * return may land in an outer ring or another mode, in which case the data
 * segment registers get sanitized/reloaded (same reasoning as for iret
 * below), hence DS/ES/FS/GS selector, base, limit and attributes are all in
 * the modified-register mask.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
9059
9060
/**
 * @opcode 0xcb
 *
 * RETF - far return without stack adjustment.  Shares iemCImpl_retf with the
 * 0xca encoding, passing 0 as the pop count.  See iemOp_retf_Iw for why the
 * full set of data segment registers is in the modified-register mask.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
9089
9090
/**
 * @opcode 0xcc
 *
 * INT3 - software breakpoint, raises \#BP via iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Modified-register mask is 0 here while int Ib uses UINT64_MAX; this one
       also carries IEM_CIMPL_F_END_TB - presumably the TB ends so no register
       flushing mask is needed (NOTE(review): verify against the macro impl). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
9102
9103
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an 8-bit vector; defers to iemCImpl_int.
 * Passes UINT64_MAX as the modified-register mask since an interrupt/task
 * switch can touch essentially any guest register.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9117
9118
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set; invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  Conditional, hence IEM_CIMPL_F_BRANCH_CONDITIONAL.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9132
9133
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  Defers to iemCImpl_iret with the effective
 * operand size; may switch ring/mode and checks for pending IRQs first
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
9164
9165
/**
 * @opcode 0xd0
 *
 * Group 2: shift/rotate Eb by an implicit count of 1.  Each defined /reg
 * case selects the CPU-behaviour specific EFLAGS worker table and expands
 * the shared register/memory body; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,         0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,        2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,         0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9305
9306
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   The macro expands to the complete register/memory + 16/32/64-bit operand
   size decode for a group 2 'Ev,1' instruction; a_pImplExpr supplies the
   table of size-specific assembly workers (pfnNormalU16/U32/U64).  It is
   used by the iemOp_grp2_*_Ev_1 functions below and #undef'ed after them. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9427
/**
 * @opmaps grp2_d1
 * @opcode /0
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific ROL worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9438
9439
/**
 * @opmaps grp2_d1
 * @opcode /1
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific ROR worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9450
9451
/**
 * @opmaps grp2_d1
 * @opcode /2
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific RCL worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9462
9463
/**
 * @opmaps grp2_d1
 * @opcode /3
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific RCR worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9474
9475
/**
 * @opmaps grp2_d1
 * @opcode /4
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific SHL worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9486
9487
/**
 * @opmaps grp2_d1
 * @opcode /5
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific SHR worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9498
9499
/**
 * @opmaps grp2_d1
 * @opcode /7
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,1 body with the CPU-behaviour specific SAR worker table. */
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9510
9511#undef GRP2_BODY_Ev_1
9512
/**
 * @opcode 0xd1
 *
 * Group 2: shift/rotate Ev by an implicit count of 1.  Pure dispatcher on
 * the ModR/M reg field to the iemOp_grp2_*_Ev_1 workers; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9532
9533
/**
 * @opcode 0xd2
 *
 * Group 2: shift/rotate Eb by the count in CL.  Each defined /reg case
 * selects the CPU-behaviour specific EFLAGS worker table and expands the
 * shared register/memory body; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
        IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9675
9676
9677/* Need to use a body macro here since the EFLAGS behaviour differs between
9678 the shifts, rotates and rotate w/ carry. Sigh. */
9679#define GRP2_BODY_Ev_CL(a_pImplExpr) \
9680 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9681 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9682 { \
9683 /* register */ \
9684 switch (pVCpu->iem.s.enmEffOpSize) \
9685 { \
9686 case IEMMODE_16BIT: \
9687 IEM_MC_BEGIN(3, 0, 0, 0); \
9688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9689 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9690 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9691 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9692 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9693 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9694 IEM_MC_REF_EFLAGS(pEFlags); \
9695 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9696 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9697 IEM_MC_END(); \
9698 break; \
9699 \
9700 case IEMMODE_32BIT: \
9701 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9703 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9704 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9705 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9706 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9707 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9708 IEM_MC_REF_EFLAGS(pEFlags); \
9709 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9710 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9711 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9712 IEM_MC_END(); \
9713 break; \
9714 \
9715 case IEMMODE_64BIT: \
9716 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9718 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9719 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9720 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9721 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9722 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9723 IEM_MC_REF_EFLAGS(pEFlags); \
9724 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9725 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9726 IEM_MC_END(); \
9727 break; \
9728 \
9729 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9730 } \
9731 } \
9732 else \
9733 { \
9734 /* memory */ \
9735 switch (pVCpu->iem.s.enmEffOpSize) \
9736 { \
9737 case IEMMODE_16BIT: \
9738 IEM_MC_BEGIN(3, 3, 0, 0); \
9739 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9740 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9741 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9743 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9744 \
9745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9747 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9749 IEM_MC_FETCH_EFLAGS(EFlags); \
9750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9751 \
9752 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9753 IEM_MC_COMMIT_EFLAGS(EFlags); \
9754 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9755 IEM_MC_END(); \
9756 break; \
9757 \
9758 case IEMMODE_32BIT: \
9759 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9760 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9761 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9762 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9764 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9765 \
9766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9768 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9769 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9770 IEM_MC_FETCH_EFLAGS(EFlags); \
9771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9772 \
9773 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9774 IEM_MC_COMMIT_EFLAGS(EFlags); \
9775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9776 IEM_MC_END(); \
9777 break; \
9778 \
9779 case IEMMODE_64BIT: \
9780 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9781 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9782 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9783 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9785 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9786 \
9787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9789 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9790 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9791 IEM_MC_FETCH_EFLAGS(EFlags); \
9792 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9793 \
9794 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9795 IEM_MC_COMMIT_EFLAGS(EFlags); \
9796 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9797 IEM_MC_END(); \
9798 break; \
9799 \
9800 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9801 } \
9802 } (void)0
9803
9804
 9805/**
      * rol Ev,CL: rotate r/m16/32/64 left by CL via the common group 2 Ev,CL body.
      * NOTE(review): @opmaps says grp2_d0 but these are the 0xd3 (Ev,CL) forms -
      * verify against the opcode-map generator (applies to all seven wrappers).
 9806 * @opmaps grp2_d0
 9807 * @opcode /0
 9808 * @opflclass rotate_count
 9809 */
 9810FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
 9811{
 9812    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9813    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
 9814}
 9815
 9816
 9817/**
      * ror Ev,CL: rotate right, common body with the ror EFLAGS implementation.
 9818 * @opmaps grp2_d0
 9819 * @opcode /1
 9820 * @opflclass rotate_count
 9821 */
 9822FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
 9823{
 9824    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9825    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
 9826}
 9827
 9828
 9829/**
      * rcl Ev,CL: rotate left through carry, common body with the rcl a-impl.
 9830 * @opmaps grp2_d0
 9831 * @opcode /2
 9832 * @opflclass rotate_carry_count
 9833 */
 9834FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
 9835{
 9836    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9837    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
 9838}
 9839
 9840
 9841/**
      * rcr Ev,CL: rotate right through carry, common body with the rcr a-impl.
 9842 * @opmaps grp2_d0
 9843 * @opcode /3
 9844 * @opflclass rotate_carry_count
 9845 */
 9846FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
 9847{
 9848    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9849    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
 9850}
 9851
 9852
 9853/**
      * shl Ev,CL: shift left, common body with the shl EFLAGS implementation.
 9854 * @opmaps grp2_d0
 9855 * @opcode /4
 9856 * @opflclass shift_count
 9857 */
 9858FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
 9859{
 9860    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9861    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
 9862}
 9863
 9864
 9865/**
      * shr Ev,CL: logical shift right, common body with the shr a-impl.
 9866 * @opmaps grp2_d0
 9867 * @opcode /5
 9868 * @opflclass shift_count
 9869 */
 9870FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
 9871{
 9872    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9873    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
 9874}
 9875
 9876
 9877/**
      * sar Ev,CL: arithmetic shift right, common body with the sar a-impl.
 9878 * @opmaps grp2_d0
 9879 * @opcode /7
 9880 * @opflclass shift_count
 9881 */
 9882FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
 9883{
 9884    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
 9885    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
 9886}
9887
9888#undef GRP2_BODY_Ev_CL
9889
 9890/**
 9891 * @opcode 0xd3
      *
      * Group 2 Ev,CL: dispatches on the ModR/M reg field to the shift/rotate
      * workers above.  /6 is an invalid encoding and raises an invalid-opcode
      * exception.
 9892 */
 9893FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
 9894{
 9895    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 9896    switch (IEM_GET_MODRM_REG_8(bRm))
 9897    {
 9898        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
 9899        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
 9900        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
 9901        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
 9902        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
 9903        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
 9904        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
 9905        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
 9906        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
 9907    }
 9908}
9909
9910
 9911/**
 9912 * @opcode 0xd4
 9913 * @opflmodify cf,pf,af,zf,sf,of
 9914 * @opflundef cf,af,of
      *
      * aam Ib: not valid in 64-bit mode (IEMOP_HLP_NO_64BIT).  A zero immediate
      * raises a divide error at decode time, before deferring to the C impl.
 9915 */
 9916FNIEMOP_DEF(iemOp_aam_Ib)
 9917{
 9918/** @todo testcase: aam */
 9919    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
 9920    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
 9921    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9922    IEMOP_HLP_NO_64BIT();
 9923    if (!bImm)
 9924        IEMOP_RAISE_DIVIDE_ERROR_RET();
 9925    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
 9926}
9927
9928
 9929/**
 9930 * @opcode 0xd5
 9931 * @opflmodify cf,pf,af,zf,sf,of
 9932 * @opflundef cf,af,of
      *
      * aad Ib: not valid in 64-bit mode.  Unlike aam there is no divide-error
      * check here; the work is deferred to the C impl (rAX is flagged dirty).
 9933 */
 9934FNIEMOP_DEF(iemOp_aad_Ib)
 9935{
 9936/** @todo testcase: aad? */
 9937    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
 9938    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
 9939    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9940    IEMOP_HLP_NO_64BIT();
 9941    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
 9942}
9943
9944
 9945/**
 9946 * @opcode 0xd6
      *
      * salc: sets AL to 0xff when EFLAGS.CF is set, otherwise to 0x00.
      * Not valid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 9947 */
 9948FNIEMOP_DEF(iemOp_salc)
 9949{
 9950    IEMOP_MNEMONIC(salc, "salc");
 9951    IEMOP_HLP_NO_64BIT();
 9952
 9953    IEM_MC_BEGIN(0, 0, 0, 0);
 9954    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9955    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
 9956        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
 9957    } IEM_MC_ELSE() {
 9958        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
 9959    } IEM_MC_ENDIF();
 9960    IEM_MC_ADVANCE_RIP_AND_FINISH();
 9961    IEM_MC_END();
 9962}
9963
9964
 9965/**
 9966 * @opcode 0xd7
      *
      * xlat: loads AL from seg:[rBX + zero-extended AL], with one MC block per
      * effective address size (16/32/64-bit).
 9967 */
 9968FNIEMOP_DEF(iemOp_xlat)
 9969{
 9970    IEMOP_MNEMONIC(xlat, "xlat");
 9971    switch (pVCpu->iem.s.enmEffAddrMode)
 9972    {
 9973        case IEMMODE_16BIT:
 9974            IEM_MC_BEGIN(2, 0, 0, 0);
 9975            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9976            IEM_MC_LOCAL(uint8_t, u8Tmp);
 9977            IEM_MC_LOCAL(uint16_t, u16Addr);
 9978            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
 9979            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
 9980            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
 9981            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
 9982            IEM_MC_ADVANCE_RIP_AND_FINISH();
 9983            IEM_MC_END();
 9984            break;
 9985
 9986        case IEMMODE_32BIT:
 9987            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
 9988            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9989            IEM_MC_LOCAL(uint8_t, u8Tmp);
 9990            IEM_MC_LOCAL(uint32_t, u32Addr);
 9991            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
 9992            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
 9993            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
 9994            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
 9995            IEM_MC_ADVANCE_RIP_AND_FINISH();
 9996            IEM_MC_END();
 9997            break;
 9998
 9999        case IEMMODE_64BIT:
10000            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
10001            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10002            IEM_MC_LOCAL(uint8_t, u8Tmp);
10003            IEM_MC_LOCAL(uint64_t, u64Addr);
10004            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10005            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10006            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10007            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10008            IEM_MC_ADVANCE_RIP_AND_FINISH();
10009            IEM_MC_END();
10010            break;
10011
10012        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10013    }
10014}
10015
10016
10017/**
10018 * Common worker for FPU instructions working on ST0 and STn, and storing the
10019 * result in ST0.
10020 *
      * Raises \#NM / pending FPU exceptions first; when either register is empty
      * the stack-underflow response is taken instead of calling the a-impl.
      *
10021 * @param bRm Mod R/M byte.
10022 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10023 */
10024FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10025{
10026    IEM_MC_BEGIN(3, 1, 0, 0);
10027    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10028    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10029    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10030    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10031    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10032
10033    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10034    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10035    IEM_MC_PREPARE_FPU_USAGE();
10036    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10037        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10038        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10039    } IEM_MC_ELSE() {
10040        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10041    } IEM_MC_ENDIF();
10042    IEM_MC_ADVANCE_RIP_AND_FINISH();
10043
10044    IEM_MC_END();
10045}
10046
10047
10048/**
10049 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10050 * flags.
10051 *
      * The a-impl returns a new FSW value which is merged via IEM_MC_UPDATE_FSW;
      * no register is written.  Empty operands take the underflow response.
      *
10052 * @param bRm Mod R/M byte.
10053 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10054 */
10055FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10056{
10057    IEM_MC_BEGIN(3, 1, 0, 0);
10058    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10059    IEM_MC_LOCAL(uint16_t, u16Fsw);
10060    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10061    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10062    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10063
10064    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10065    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10066    IEM_MC_PREPARE_FPU_USAGE();
10067    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10068        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10069        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10070    } IEM_MC_ELSE() {
10071        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10072    } IEM_MC_ENDIF();
10073    IEM_MC_ADVANCE_RIP_AND_FINISH();
10074
10075    IEM_MC_END();
10076}
10077
10078
10079/**
10080 * Common worker for FPU instructions working on ST0 and STn, only affecting
10081 * flags, and popping when done.
10082 *
      * Same as iemOpHlpFpuNoStore_st0_stN, but uses the _THEN_POP FSW/underflow
      * variants so the stack is popped on both the normal and underflow paths.
      *
10083 * @param bRm Mod R/M byte.
10084 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10085 */
10086FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10087{
10088    IEM_MC_BEGIN(3, 1, 0, 0);
10089    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10090    IEM_MC_LOCAL(uint16_t, u16Fsw);
10091    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10092    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10093    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10094
10095    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10096    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10097    IEM_MC_PREPARE_FPU_USAGE();
10098    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10099        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10100        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10101    } IEM_MC_ELSE() {
10102        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10103    } IEM_MC_ENDIF();
10104    IEM_MC_ADVANCE_RIP_AND_FINISH();
10105
10106    IEM_MC_END();
10107}
10108
10109
10110/** Opcode 0xd8 11/0: fadd st0,stN - result stored in ST0. */
10111FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
10112{
10113    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
10114    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
10115}
10116
10117
10118/** Opcode 0xd8 11/1: fmul st0,stN - result stored in ST0. */
10119FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
10120{
10121    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
10122    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
10123}
10124
10125
10126/** Opcode 0xd8 11/2: fcom st0,stN - flags only, no store. */
10127FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
10128{
10129    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
10130    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
10131}
10132
10133
10134/** Opcode 0xd8 11/3: fcomp st0,stN - same a-impl as fcom but pops afterwards. */
10135FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
10136{
10137    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
10138    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
10139}
10140
10141
10142/** Opcode 0xd8 11/4: fsub st0,stN - result stored in ST0. */
10143FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
10144{
10145    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
10146    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
10147}
10148
10149
10150/** Opcode 0xd8 11/5: fsubr st0,stN - reversed-operand subtract, result in ST0. */
10151FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
10152{
10153    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
10154    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
10155}
10156
10157
10158/** Opcode 0xd8 11/6: fdiv st0,stN - result stored in ST0. */
10159FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
10160{
10161    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
10162    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
10163}
10164
10165
10166/** Opcode 0xd8 11/7: fdivr st0,stN - reversed-operand divide, result in ST0. */
10167FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
10168{
10169    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
10170    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
10171}
10172
10173
10174/**
10175 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10176 * the result in ST0.
10177 *
      * The 32-bit real operand is fetched from memory before FPU usage is
      * prepared; an empty ST0 takes the stack-underflow response.
      *
10178 * @param bRm Mod R/M byte.
10179 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10180 */
10181FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10182{
10183    IEM_MC_BEGIN(3, 3, 0, 0);
10184    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10185    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10186    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10187    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10188    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10189    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10190
10191    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10192    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10193
10194    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10195    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10196    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10197
10198    IEM_MC_PREPARE_FPU_USAGE();
10199    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10200        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10201        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10202    } IEM_MC_ELSE() {
10203        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10204    } IEM_MC_ENDIF();
10205    IEM_MC_ADVANCE_RIP_AND_FINISH();
10206
10207    IEM_MC_END();
10208}
10209
10210
10211/** Opcode 0xd8 !11/0: fadd st0,m32r - result stored in ST0. */
10212FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
10213{
10214    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
10215    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
10216}
10217
10218
10219/** Opcode 0xd8 !11/1: fmul st0,m32r - result stored in ST0. */
10220FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
10221{
10222    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
10223    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
10224}
10225
10226
10227/** Opcode 0xd8 !11/2.
      * fcom st0,m32r: compares ST0 with a 32-bit real from memory; only FSW is
      * updated (with the memory operand recorded), nothing is stored. */
10228FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
10229{
10230    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
10231
10232    IEM_MC_BEGIN(3, 3, 0, 0);
10233    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10234    IEM_MC_LOCAL(uint16_t, u16Fsw);
10235    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10236    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10237    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10238    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10239
10240    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10241    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10242
10243    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10244    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10245    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10246
10247    IEM_MC_PREPARE_FPU_USAGE();
10248    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10249        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10250        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10251    } IEM_MC_ELSE() {
10252        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10253    } IEM_MC_ENDIF();
10254    IEM_MC_ADVANCE_RIP_AND_FINISH();
10255
10256    IEM_MC_END();
10257}
10258
10259
10260/** Opcode 0xd8 !11/3.
      * fcomp st0,m32r: same compare as fcom st0,m32r but pops the stack
      * afterwards (the _THEN_POP FSW/underflow variants). */
10261FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
10262{
10263    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
10264
10265    IEM_MC_BEGIN(3, 3, 0, 0);
10266    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10267    IEM_MC_LOCAL(uint16_t, u16Fsw);
10268    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10269    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10270    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10271    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10272
10273    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10274    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10275
10276    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10277    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10278    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10279
10280    IEM_MC_PREPARE_FPU_USAGE();
10281    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10282        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10283        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10284    } IEM_MC_ELSE() {
10285        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10286    } IEM_MC_ENDIF();
10287    IEM_MC_ADVANCE_RIP_AND_FINISH();
10288
10289    IEM_MC_END();
10290}
10291
10292
10293/** Opcode 0xd8 !11/4: fsub st0,m32r - result stored in ST0. */
10294FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
10295{
10296    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
10297    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
10298}
10299
10300
10301/** Opcode 0xd8 !11/5: fsubr st0,m32r - reversed-operand subtract, result in ST0. */
10302FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
10303{
10304    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
10305    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
10306}
10307
10308
10309/** Opcode 0xd8 !11/6: fdiv st0,m32r - result stored in ST0. */
10310FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
10311{
10312    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
10313    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
10314}
10315
10316
10317/** Opcode 0xd8 !11/7: fdivr st0,m32r - reversed-operand divide, result in ST0. */
10318FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
10319{
10320    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
10321    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
10322}
10323
10324
10325/**
10326 * @opcode 0xd8
      *
      * FPU escape 0xd8: records the FPU opcode (ModR/M byte + low 3 bits of
      * 0xd8) in uFpuOpcode, then dispatches on the ModR/M reg field; register
      * forms operate on ST0/STn, memory forms on ST0 and an m32 real.
10327 */
10328FNIEMOP_DEF(iemOp_EscF0)
10329{
10330    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10331    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10332
10333    if (IEM_IS_MODRM_REG_MODE(bRm))
10334    {
10335        switch (IEM_GET_MODRM_REG_8(bRm))
10336        {
10337            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10338            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10339            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10340            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10341            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10342            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10343            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10344            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10345            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10346        }
10347    }
10348    else
10349    {
10350        switch (IEM_GET_MODRM_REG_8(bRm))
10351        {
10352            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10353            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10354            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10355            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10356            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10357            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10358            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10359            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10360            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10361        }
10362    }
10363}
10364
10365
10366/** Opcode 0xd9 /0 mem32real
      * fld m32r: converts a 32-bit real from memory to r80 and pushes it; the
      * push requires stack slot 7 to be empty, else the push-overflow response.
10367 * @sa iemOp_fld_m64r */
10368FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
10369{
10370    IEMOP_MNEMONIC(fld_m32r, "fld m32r");
10371
10372    IEM_MC_BEGIN(2, 3, 0, 0);
10373    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10374    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10375    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
10376    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10377    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
10378
10379    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10380    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10381
10382    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10383    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10384    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10385    IEM_MC_PREPARE_FPU_USAGE();
10386    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10387        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
10388        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10389    } IEM_MC_ELSE() {
10390        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10391    } IEM_MC_ENDIF();
10392    IEM_MC_ADVANCE_RIP_AND_FINISH();
10393
10394    IEM_MC_END();
10395}
10396
10397
10399/** Opcode 0xd9 !11/2 mem32real
      * fst m32r: stores ST0 as a 32-bit real.  On empty ST0: if FCW.IM is set a
      * negative QNaN is written, otherwise the mapping is rolled back; in both
      * cases the stack-underflow response is raised. */
10400FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
10401{
10402    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
10403    IEM_MC_BEGIN(3, 3, 0, 0);
10404    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10405    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10406
10407    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10408    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10409    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10410    IEM_MC_PREPARE_FPU_USAGE();
10411
10412    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10413    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10414    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10415
10416    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10417    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10418        IEM_MC_LOCAL(uint16_t, u16Fsw);
10419        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10420        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
10421        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10422        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10423    } IEM_MC_ELSE() {
10424        IEM_MC_IF_FCW_IM() {
10425            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
10426            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10427        } IEM_MC_ELSE() {
10428            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10429        } IEM_MC_ENDIF();
10430        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10431    } IEM_MC_ENDIF();
10432    IEM_MC_ADVANCE_RIP_AND_FINISH();
10433
10434    IEM_MC_END();
10435}
10435
10436
10437/** Opcode 0xd9 !11/3
      * fstp m32r: like fst m32r but pops the stack afterwards (uses the
      * _THEN_POP FSW/underflow variants). */
10438FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
10439{
10440    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
10441    IEM_MC_BEGIN(3, 3, 0, 0);
10442    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10443    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10444
10445    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10446    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10447    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10448    IEM_MC_PREPARE_FPU_USAGE();
10449
10450    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10451    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10452    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10453
10454    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10455    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10456        IEM_MC_LOCAL(uint16_t, u16Fsw);
10457        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10458        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
10459        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10460        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10461    } IEM_MC_ELSE() {
10462        IEM_MC_IF_FCW_IM() {
10463            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
10464            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10465        } IEM_MC_ELSE() {
10466            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10467        } IEM_MC_ENDIF();
10468        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10469    } IEM_MC_ENDIF();
10470    IEM_MC_ADVANCE_RIP_AND_FINISH();
10471
10472    IEM_MC_END();
10473}
10474
10475
10477/** Opcode 0xd9 !11/4
      * fldenv m14/28byte: defers the whole environment load to the C impl; the
      * effective operand size is passed along to select the 14 vs 28 byte
      * layout (per the mnemonic). */
10478FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
10479{
10480    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
10481    IEM_MC_BEGIN(3, 0, 0, 0);
10482    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
10483    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10484
10485    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10486    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10487    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10488
10489    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
10490    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
10491    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10492                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
10493    IEM_MC_END();
10494}
10494
10495
10496/** Opcode 0xd9 !11/5
      *
      * fldcw m2byte: loads a new FPU control word from memory and defers to the
      * C implementation (flagged as modifying the guest FCW register).
      *
      * Fix: the local carrying the new control-word value was misleadingly
      * named u16Fsw (status-word prefix); renamed to u16Fcw for consistency
      * with iemOp_fnstcw below.  Purely a local rename, no behaviour change. */
10497FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10498{
10499    IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10500    IEM_MC_BEGIN(1, 1, 0, 0);
10501    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10502    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10503
10504    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10505    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10506    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10507
10508    IEM_MC_ARG(uint16_t, u16Fcw, 0); /* the new FCW value read from [GCPtrEffSrc] */
10509    IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10510
10511    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10512                        iemCImpl_fldcw, u16Fcw);
10513    IEM_MC_END();
10514}
10515
10516
10517/** Opcode 0xd9 !11/6
      * fnstenv m14/m28byte: stores the FPU environment via the C impl; FPU
      * state is only actualized for reading.  NOTE(review): the mnemonic stat
      * name says "fstenv" while the function is fnstenv - verify intentional. */
10518FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
10519{
10520    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
10521    IEM_MC_BEGIN(3, 0, 0, 0);
10522    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
10523    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10524
10525    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10526    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10527    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10528
10529    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
10530    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
10531    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
10532                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
10533    IEM_MC_END();
10534}
10535
10536
10537/** Opcode 0xd9 !11/7
      * fnstcw m2byte: fetches the current FCW and stores it to memory; no FPU
      * exception check beyond \#NM (no-wait form). */
10538FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
10539{
10540    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
10541    IEM_MC_BEGIN(2, 0, 0, 0);
10542    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10543    IEM_MC_LOCAL(uint16_t, u16Fcw);
10544    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10545    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10546    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10547    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10548    IEM_MC_FETCH_FCW(u16Fcw);
10549    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
10550    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
10551    IEM_MC_END();
10552}
10553
10554
10555/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
      * fnop: raises \#NM / pending FPU exceptions and updates FOP/FPUIP, but
      * performs no arithmetic. */
10556FNIEMOP_DEF(iemOp_fnop)
10557{
10558    IEMOP_MNEMONIC(fnop, "fnop");
10559    IEM_MC_BEGIN(0, 0, 0, 0);
10560    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10561    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10562    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10563    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10564    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
10565     * intel optimizations. Investigate. */
10566    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10567    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
10568    IEM_MC_END();
10569}
10570
10571
10572/** Opcode 0xd9 11/0 stN
      * fld stN: pushes a copy of STn; an empty source register takes the
      * push-underflow response. */
10573FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
10574{
10575    IEMOP_MNEMONIC(fld_stN, "fld stN");
10576    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
10577     * indicates that it does. */
10578    IEM_MC_BEGIN(0, 2, 0, 0);
10579    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10580    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
10581    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10582    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10583    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10584
10585    IEM_MC_PREPARE_FPU_USAGE();
10586    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
10587        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
10588        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10589    } IEM_MC_ELSE() {
10590        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
10591    } IEM_MC_ENDIF();
10592
10593    IEM_MC_ADVANCE_RIP_AND_FINISH();
10594    IEM_MC_END();
10595}
10596
10597
10598/** Opcode 0xd9 11/3 stN
      * fxch stN: swaps ST0 and STn (C1 is set via X86_FSW_C1 in the result);
      * if either register is empty the underflow handling is deferred to
      * iemCImpl_fxch_underflow. */
10599FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
10600{
10601    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
10602    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
10603     * indicates that it does. */
10604    IEM_MC_BEGIN(2, 3, 0, 0);
10605    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10606    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
10607    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
10608    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10609    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
10610    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
10611    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10612    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10613
10614    IEM_MC_PREPARE_FPU_USAGE();
10615    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10616        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
10617        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
10618        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10619    } IEM_MC_ELSE() {
10620        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
10621    } IEM_MC_ENDIF();
10622
10623    IEM_MC_ADVANCE_RIP_AND_FINISH();
10624    IEM_MC_END();
10625}
10626
10627
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST(0) to ST(i) and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just update FSW and
           pop (or raise underflow if ST(0) is empty). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10674
10675
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * If ST0 is empty the assembly worker is skipped and a stack underflow is
 * recorded instead.  The FPU opcode (FOP) is taken from
 * pVCpu->iem.s.uFpuOpcode, set up by the escape-byte decoder.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,                  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10703
10704
/** Opcode 0xd9 0xe0.
 * FCHS - complement the sign of ST(0); unary worker does the rest. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10711
10712
/** Opcode 0xd9 0xe1.
 * FABS - absolute value of ST(0); unary worker does the rest. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10719
10720
/** Opcode 0xd9 0xe4.
 * FTST - compares ST(0) against +0.0, updating only FSW (no value stored). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10744
10745
/** Opcode 0xd9 0xe5.
 * FXAM - classifies ST(0) into the FSW condition codes.  Note that unlike
 * most ST(0) users this runs even when the register is empty (the worker is
 * handed the register reference unconditionally via IEM_MC_REF_FPUREG). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10766
10767
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The slot a push lands in is register 7 relative to the current top; it must
 * be empty, otherwise a stack-overflow push is recorded instead of calling
 * the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10793
10794
/** Opcode 0xd9 0xe8.
 * FLD1 - push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10801
10802
/** Opcode 0xd9 0xe9.
 * FLDL2T - push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10809
10810
/** Opcode 0xd9 0xea.
 * FLDL2E - push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10817
/** Opcode 0xd9 0xeb.
 * FLDPI - push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10824
10825
/** Opcode 0xd9 0xec.
 * FLDLG2 - push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10832
/** Opcode 0xd9 0xed.
 * FLDLN2 - push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10839
10840
/** Opcode 0xd9 0xee.
 * FLDZ - push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10847
10848
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - computes 2^ST(0) - 1, replacing ST(0).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10862
10863
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * If either register is empty a stack underflow is recorded for STn and the
 * stack is popped without calling the assembly worker.
 *
 * @param   bRm         Mod R/M byte (R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10894
10895
/** Opcode 0xd9 0xf1.
 * FYL2X - ST(1) = ST(1) * log2(ST(0)); pops the stack. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10902
10903
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * If ST0 is empty a push-underflow (two-result variant) is recorded instead
 * of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo,     FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,                  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10931
10932
/** Opcode 0xd9 0xf2.
 * FPTAN - partial tangent: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10939
10940
/** Opcode 0xd9 0xf3.
 * FPATAN - ST(1) = arctan(ST(1)/ST(0)); pops the stack. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10947
10948
/** Opcode 0xd9 0xf4.
 * FXTRACT - splits ST(0) into exponent and significand (two results). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10955
10956
/** Opcode 0xd9 0xf5.
 * FPREM1 - IEEE partial remainder of ST(0) by ST(1); result stays in ST(0)
 * (uses the st0/stN worker defined earlier in this file). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10963
10964
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrements the FPU top-of-stack pointer; no register contents
 * are touched. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10985
10986
/** Opcode 0xd9 0xf7.
 * FINCSTP - increments the FPU top-of-stack pointer; no register contents
 * are touched. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11007
11008
/** Opcode 0xd9 0xf8.
 * FPREM - partial remainder (truncating) of ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11015
11016
/** Opcode 0xd9 0xf9.
 * FYL2XP1 - ST(1) = ST(1) * log2(ST(0) + 1); pops the stack. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11023
11024
/** Opcode 0xd9 0xfa.
 * FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11031
11032
/** Opcode 0xd9 0xfb.
 * FSINCOS - sine and cosine of ST(0): replaces ST(0) and pushes a second
 * result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11039
11040
/** Opcode 0xd9 0xfc.
 * FRNDINT - rounds ST(0) to integer (per FCW rounding control). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11047
11048
/** Opcode 0xd9 0xfd.
 * FSCALE - scales ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11055
11056
/** Opcode 0xd9 0xfe.
 * FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11063
11064
/** Opcode 0xd9 0xff.
 * FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11071
11072
/** Used by iemOp_EscF1 to dispatch the register-form encodings 0xe0..0xff of
 *  escape opcode 0xd9; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
11109
11110
/**
 * @opcode  0xd9
 *
 * FPU escape byte 0xd9: dispatches on the modrm byte.  Register forms go to
 * the fld/fxch/fnop/fstp handlers or the g_apfnEscF1_E0toFF table; memory
 * forms are the m32r loads/stores and the env/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modrm in the low byte, the low three bits
       of the escape byte (0xd9 -> 1) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only /2 with rm=0 (encoding 0xd0) is valid: FNOP. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* /4../7 cover encodings 0xe0..0xff, dispatched via table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11155
11156
/** Opcode 0xda 11/0.
 * FCMOVB - copies ST(i) to ST(0) when EFLAGS.CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; the move itself is conditional
       on CF, but FOP/FPUIP are updated either way. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11181
11182
/** Opcode 0xda 11/1.
 * FCMOVE - copies ST(i) to ST(0) when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; FOP/FPUIP are updated whether
       or not the conditional move fires. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11207
11208
/** Opcode 0xda 11/2.
 * FCMOVBE - copies ST(i) to ST(0) when EFLAGS.CF or EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; FOP/FPUIP are updated whether
       or not the conditional move fires. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11233
11234
/** Opcode 0xda 11/3.
 * FCMOVU - copies ST(i) to ST(0) when EFLAGS.PF is set (PF=1 signals
 * 'unordered' after FCOMI-style compares, hence the U in the mnemonic). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11259
11260
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * If either register is empty a stack underflow is recorded and the stack is
 * still popped twice.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11290
11291
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST(0) with ST(1); pops twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11298
11299
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit integer is fetched from memory before the FPU usage is prepared;
 * an empty ST0 yields a stack underflow instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte (memory operand encoding).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11335
11336
/** Opcode 0xda !11/0.
 * FIADD m32i - ST(0) += (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11343
11344
/** Opcode 0xda !11/1.
 * FIMUL m32i - ST(0) *= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11351
11352
/** Opcode 0xda !11/2.
 * FICOM m32i - compares ST(0) with an int32 from memory; FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* The memory operand address is recorded in FPUDP/FPUDS as well. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11384
11385
/** Opcode 0xda !11/3.
 * FICOMP m32i - like FICOM m32i (same assembly worker) but pops ST(0). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11417
11418
/** Opcode 0xda !11/4.
 * FISUB m32i - ST(0) -= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11425
11426
/** Opcode 0xda !11/5.
 * FISUBR m32i - ST(0) = (int32 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11433
11434
/** Opcode 0xda !11/6.
 * FIDIV m32i - ST(0) /= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11441
11442
/** Opcode 0xda !11/7.
 * FIDIVR m32i - ST(0) = (int32 from memory) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11449
11450
/**
 * @opcode  0xda
 *
 * FPU escape byte 0xda: register forms are the FCMOVcc family (plus FUCOMPP
 * at 0xe9); memory forms are the m32i integer arithmetic/compare
 * instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modrm low, escape byte's low 3 bits high. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only /5 with rm=1 (encoding 0xe9) is valid: FUCOMPP. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11492
11493
/** Opcode 0xdb !11/0.
 * FILD m32i - converts an int32 from memory to R80 and pushes it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push lands; it must be
       empty, otherwise this is a push-overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11524
11525
/** Opcode 0xdb !11/1.
 * FISTTP m32i - stores ST(0) to memory as int32 with truncation, then pops.
 * The destination is mapped for write first; on an empty ST(0) the masked
 * invalid-operation response writes the integer-indefinite value instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int32_t *,               pi32Dst,    1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,              u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if #IS is masked (FCW.IM), store integer indefinite;
           otherwise roll back the mapping and raise via the underflow path. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11563
11564
/** Opcode 0xdb !11/2.
 * FIST m32i - stores ST(0) to memory as int32 (FCW rounding), no pop.
 * Same structure as FISTTP above, but without the pop and using the
 * rounding (fist) worker. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int32_t *,               pi32Dst,    1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,              u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked #IS stores integer indefinite, unmasked rolls
           back the write mapping. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11602
11603
11604/** Opcode 0xdb !11/3. */
11605FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11606{
11607 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
11608 IEM_MC_BEGIN(3, 2, 0, 0);
11609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11611
11612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11613 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11614 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11615 IEM_MC_PREPARE_FPU_USAGE();
11616
11617 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11618 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11619 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11620
11621 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11622 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11623 IEM_MC_LOCAL(uint16_t, u16Fsw);
11624 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11625 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11626 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11627 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11628 } IEM_MC_ELSE() {
11629 IEM_MC_IF_FCW_IM() {
11630 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11631 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11632 } IEM_MC_ELSE() {
11633 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11634 } IEM_MC_ENDIF();
11635 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11636 } IEM_MC_ENDIF();
11637 IEM_MC_ADVANCE_RIP_AND_FINISH();
11638
11639 IEM_MC_END();
11640}
11641
11642
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: fetch an 80-bit real from memory and push it onto the
       FPU register stack. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push requires the register below TOP, i.e. ST(7), to be free;
       otherwise record a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11673
11674
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: store ST(0) to an 80-bit real in memory and pop. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): masked -> store negative QNaN (real indefinite);
           unmasked -> roll back the mapping. Underflow recorded either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11712
11713
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.CF is clear
       (not below). FPU opcode/IP are updated regardless of the condition. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise record stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11738
11739
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.ZF is clear
       (not equal). Same template as fcmovnb, only the tested flag differs. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11764
11765
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are
       clear (not below or equal). */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11790
11791
/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* FCMOVNU ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.PF is clear
       (not unordered). */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11816
11817
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable instruction; ignored (no-op) here apart
       from the #NM device-not-available check. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11828
11829
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable instruction; ignored (no-op) here apart
       from the #NM device-not-available check. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11840
11841
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FPU exception flags in FSW without checking for
       pending exceptions first (no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11854
11855
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: re-initialize the FPU; deferred to the C implementation with
       fCheckXcpts=false (the no-wait form does not check pending exceptions). */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
11864
11865
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    /* 80287-only; ignored (no-op) on later FPUs apart from the #NM check. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11876
11877
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
    /* 80287XL-only encoding; newer CPUs raise #UD, which is what we do. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11893
11894
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; deferred to the
       shared fcomi/fucomi C implementation. The fPop flag is OR'ed into the
       opcode word argument (0 = no pop). */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11903
11904
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI ST(0),ST(i): ordered compare setting EFLAGS; same C worker as
       FUCOMI but with fUCmp=false. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11913
11914
/**
 * @opcode 0xdb
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* x87 escape 0xdb: dispatch on the ModR/M byte. Register forms (mod=3)
       are FCMOVcc / control instructions; memory forms are 32-bit integer
       loads/stores and the 80-bit real load/store. The FPU opcode word is
       recorded before dispatching. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 register forms are the individual control encodings. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11966
11967
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Both registers must be valid; otherwise a stack underflow is recorded
 * against STn. FSW/FTW and the FPU opcode/IP are updated via the
 * store-result macro.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11998
11999
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): common stN,st0 worker with the 80-bit add helper. */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12006
12007
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): common stN,st0 worker with the 80-bit multiply helper. */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12014
12015
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0): common stN,st0 worker, reversed-subtract helper. */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12022
12023
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0): common stN,st0 worker with the subtract helper. */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12030
12031
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0): common stN,st0 worker, reversed-divide helper. */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12038
12039
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0): common stN,st0 worker with the divide helper. */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12046
12047
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * The m64 operand is fetched before the FPU stack check; an empty ST(0)
 * records a stack underflow with the memory operand's seg:off for FDP.
 *
 * @param bRm Mod R/M byte.
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12082
12083
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD ST(0),m64real: common ST0/m64 worker with the add helper. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12090
12091
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL ST(0),m64real: common ST0/m64 worker with the multiply helper. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12098
12099
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM ST(0),m64real: compare, updating only FSW (C0/C2/C3 via the
       helper); no register result is stored and nothing is popped. */
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12131
12132
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP ST(0),m64real: same as FCOM m64r but pops the stack afterwards
       (note the _THEN_POP FSW/underflow variants below). */
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12164
12165
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB ST(0),m64real: common ST0/m64 worker with the subtract helper. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12172
12173
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR ST(0),m64real: common ST0/m64 worker, reversed-subtract helper. */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12180
12181
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV ST(0),m64real: common ST0/m64 worker with the divide helper. */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12188
12189
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR ST(0),m64real: common ST0/m64 worker, reversed-divide helper. */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12196
12197
/**
 * @opcode 0xdc
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* x87 escape 0xdc: register forms operate on ST(i),ST(0); memory forms
       operate on ST(0) with an m64real operand. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12236
12237
/** Opcode 0xdd !11/0.
 * FLD m64real: push a 64-bit real from memory (converted to 80-bit).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push; otherwise record a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12268
12269
/** Opcode 0xdd !11/1.
 * (Header previously said !11/0, which is FLD m64real; FISTTP m64i encodes
 * with ModR/M reg=1 per the SDM.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* FISTTP m64i: store ST(0) as a 64-bit signed integer (truncating per the
       SDM, via the fistt helper) and pop the stack. */
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): masked -> integer indefinite (INT64_MIN); unmasked -> rollback. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12307
12308
/** Opcode 0xdd !11/2.
 * (Header previously said !11/0; FST m64real encodes with ModR/M reg=2 per
 * the SDM.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* FST m64real: store ST(0) to a 64-bit real in memory without popping. */
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): masked -> negative QNaN (real indefinite); unmasked -> rollback. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12346
12347
12348
12349
/** Opcode 0xdd !11/3.
 * (Header previously said !11/0; FSTP m64real encodes with ModR/M reg=3 per
 * the SDM.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* FSTP m64real: like FST m64r but pops the stack (note the _THEN_POP
       FSW/underflow variants). */
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): masked -> negative QNaN (real indefinite); unmasked -> rollback. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12387
12388
/** Opcode 0xdd !11/4.
 * (Header previously said !11/0; FRSTOR encodes with ModR/M reg=4 per the
 * SDM.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* FRSTOR: restore the whole FPU state from a 94/108-byte memory image;
       deferred to the C implementation (operand size selects the layout). */
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12407
12408
/** Opcode 0xdd !11/6.
 * (Header previously said !11/0; FNSAVE encodes with ModR/M reg=6 per the
 * SDM.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* FNSAVE: save the whole FPU state to a 94/108-byte memory image,
       then reinitialize the FPU; deferred to the C implementation. */
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12427
/** Opcode 0xdd !11/7.
 * (Header previously said !11/0; FNSTSW m2byte encodes with ModR/M reg=7 per
 * the SDM.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* FNSTSW m16: store the FPU status word to memory; no-wait form, so no
       pending-exception check. */
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12451
12452
/** Opcode 0xdd 11/0.
 *
 *  FFREE ST(i) - marks the given stack register as empty in the tag word
 *  without changing the register contents or the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12472
12473
/** Opcode 0xdd 11/2. (Was misdocumented as 11/1 - see the EscF5 reg-mode
 *  dispatch table, which routes /2 here.)
 *
 *  FST ST(i) - copies ST(0) to ST(i) without popping; raises stack
 *  underflow into ST(i) when ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* FSW of 0 here: a plain register-to-register copy sets no status bits. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12496
12497
/** Opcode 0xdd 11/4. (Was misdocumented as 11/3 - see the EscF5 reg-mode
 *  dispatch table, which routes /4 here.)
 *  FUCOM ST(0),ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12504
12505
/** Opcode 0xdd 11/5. (Was misdocumented as 11/4 - see the EscF5 reg-mode
 *  dispatch table, which routes /5 here.)
 *  FUCOMP ST(0),ST(i) - unordered compare, no store, pops ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12512
12513
12514/**
12515 * @opcode 0xdd
12516 */
12517FNIEMOP_DEF(iemOp_EscF5)
12518{
12519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12520 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12521 if (IEM_IS_MODRM_REG_MODE(bRm))
12522 {
12523 switch (IEM_GET_MODRM_REG_8(bRm))
12524 {
12525 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12526 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
12527 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12528 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12529 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
12530 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12531 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12532 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12534 }
12535 }
12536 else
12537 {
12538 switch (IEM_GET_MODRM_REG_8(bRm))
12539 {
12540 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12541 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12542 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12543 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12544 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12545 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12546 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12547 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12549 }
12550 }
12551}
12552
12553
/** Opcode 0xde 11/0.
 *  FADDP ST(i),ST(0) - add and pop, via the common stN,st0+pop helper. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12560
12561
/** Opcode 0xde 11/1. (Was misdocumented as 11/0 - see the EscF6 reg-mode
 *  dispatch table, which routes /1 here.)
 *  FMULP ST(i),ST(0) - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12568
12569
/** Opcode 0xde 0xd9.
 *  FCOMPP - ordered compare of ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12576
12577
/** Opcode 0xde 11/4.
 *  FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12584
12585
/** Opcode 0xde 11/5.
 *  FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12592
12593
/** Opcode 0xde 11/6.
 *  FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12600
12601
/** Opcode 0xde 11/7.
 *  FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12608
12609
12610/**
12611 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12612 * the result in ST0.
12613 *
12614 * @param bRm Mod R/M byte.
12615 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12616 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* ST(0) is valid: run the assembly worker and store the result back
           into ST(0) (relative register 0). */
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: signal stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12645
12646
/** Opcode 0xde !11/0.
 *  FIADD m16int - ST(0) += (int16), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12653
12654
/** Opcode 0xde !11/1.
 *  FIMUL m16int - ST(0) *= (int16), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12661
12662
/** Opcode 0xde !11/2.
 *  FICOM m16int - ordered compare of ST(0) with a 16-bit signed integer;
 *  only the FSW condition codes are updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Compare only updates FSW; the memory operand info is recorded for FDP. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: underflow without a destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12694
12695
/** Opcode 0xde !11/3.
 *  FICOMP m16int - same as FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* THEN_POP variant: FSW update followed by the stack pop. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12727
12728
/** Opcode 0xde !11/4.
 *  FISUB m16int - ST(0) -= (int16), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12735
12736
/** Opcode 0xde !11/5.
 *  FISUBR m16int - ST(0) = (int16) - ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12743
12744
/** Opcode 0xde !11/6.
 *  FIDIV m16int - ST(0) /= (int16), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12751
12752
/** Opcode 0xde !11/7.
 *  FIDIVR m16int - ST(0) = (int16) / ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12759
12760
12761/**
12762 * @opcode 0xde
12763 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP
       reporting by the workers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only DE D9 (FCOMPP) is defined in this row. */
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12801
12802
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop" part: just increment the top-of-stack pointer. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12822
12823
/** Opcode 0xdf 0xe0.
 *  FNSTSW AX - stores the FPU status word in AX without checking for
 *  pending FPU exceptions first. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12838
12839
12840/** Opcode 0xdf 11/5. */
12841FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12842{
12843 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12844 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12845 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12846 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12847}
12848
12849
/** Opcode 0xdf 11/6.
 *
 *  FCOMIP ST(0),ST(i) - ordered compare setting ZF/PF/CF, then pop.
 *  fUCmp=false selects the ordered comparison in the shared worker;
 *  bit 31 of the last argument requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12858
12859
/** Opcode 0xdf !11/0.
 *  FILD m16int - converts a 16-bit signed integer to extended precision and
 *  pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) is the slot the push will land in; it must
       be empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12890
12891
/** Opcode 0xdf !11/1.
 *  FISTTP m16int (SSE3) - stores ST(0) as a 16-bit integer using truncation
 *  (chop) regardless of the rounding control, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer indefinite value;
           otherwise roll back the mapping and raise via underflow handling. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12929
12930
/** Opcode 0xdf !11/2.
 *  FIST m16int - stores ST(0) as a 16-bit integer using the current
 *  rounding mode; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* Non-popping variant: plain FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12968
12969
/** Opcode 0xdf !11/3.
 *  FISTP m16int - stores ST(0) as a 16-bit integer using the current
 *  rounding mode, then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13007
13008
/** Opcode 0xdf !11/4.
 *  FBLD m80bcd - loads an 80-bit packed BCD value, converts it to extended
 *  precision and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (relative register 7) must be empty or it's overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13039
13040
/** Opcode 0xdf !11/5.
 *  FILD m64int - converts a 64-bit signed integer to extended precision and
 *  pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (relative register 7) must be empty or it's overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13071
13072
/** Opcode 0xdf !11/6.
 *  FBSTP m80bcd - stores ST(0) as an 80-bit packed BCD value, then pops. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the BCD indefinite encoding. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13110
13111
/** Opcode 0xdf !11/7.
 *  FISTP m64int - stores ST(0) as a 64-bit integer using the current
 *  rounding mode, then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13149
13150
13151/**
13152 * @opcode 0xdf
13153 */
13154FNIEMOP_DEF(iemOp_EscF7)
13155{
13156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13157 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13158 if (IEM_IS_MODRM_REG_MODE(bRm))
13159 {
13160 switch (IEM_GET_MODRM_REG_8(bRm))
13161 {
13162 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13163 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13164 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13165 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13166 case 4: if (bRm == 0xe0)
13167 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13168 IEMOP_RAISE_INVALID_OPCODE_RET();
13169 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13170 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13171 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13173 }
13174 }
13175 else
13176 {
13177 switch (IEM_GET_MODRM_REG_8(bRm))
13178 {
13179 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13180 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13181 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13182 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13183 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13184 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13185 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13186 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13188 }
13189 }
13190}
13191
13192
13193/**
13194 * @opcode 0xe0
13195 * @opfltest zf
13196 */
13197FNIEMOP_DEF(iemOp_loopne_Jb)
13198{
13199 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13200 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13201 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13202
13203 switch (pVCpu->iem.s.enmEffAddrMode)
13204 {
13205 case IEMMODE_16BIT:
13206 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13209 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13210 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13211 } IEM_MC_ELSE() {
13212 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13213 IEM_MC_ADVANCE_RIP_AND_FINISH();
13214 } IEM_MC_ENDIF();
13215 IEM_MC_END();
13216 break;
13217
13218 case IEMMODE_32BIT:
13219 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13221 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13222 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13223 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13224 } IEM_MC_ELSE() {
13225 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13226 IEM_MC_ADVANCE_RIP_AND_FINISH();
13227 } IEM_MC_ENDIF();
13228 IEM_MC_END();
13229 break;
13230
13231 case IEMMODE_64BIT:
13232 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13234 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13235 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13236 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13237 } IEM_MC_ELSE() {
13238 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13239 IEM_MC_ADVANCE_RIP_AND_FINISH();
13240 } IEM_MC_ENDIF();
13241 IEM_MC_END();
13242 break;
13243
13244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13245 }
13246}
13247
13248
13249/**
13250 * @opcode 0xe1
13251 * @opfltest zf
13252 */
13253FNIEMOP_DEF(iemOp_loope_Jb)
13254{
13255 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13256 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13258
13259 switch (pVCpu->iem.s.enmEffAddrMode)
13260 {
13261 case IEMMODE_16BIT:
13262 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13264 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13265 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13266 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13267 } IEM_MC_ELSE() {
13268 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13269 IEM_MC_ADVANCE_RIP_AND_FINISH();
13270 } IEM_MC_ENDIF();
13271 IEM_MC_END();
13272 break;
13273
13274 case IEMMODE_32BIT:
13275 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13277 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13278 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13279 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13280 } IEM_MC_ELSE() {
13281 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13282 IEM_MC_ADVANCE_RIP_AND_FINISH();
13283 } IEM_MC_ENDIF();
13284 IEM_MC_END();
13285 break;
13286
13287 case IEMMODE_64BIT:
13288 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13290 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13291 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13292 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13293 } IEM_MC_ELSE() {
13294 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13295 IEM_MC_ADVANCE_RIP_AND_FINISH();
13296 } IEM_MC_ENDIF();
13297 IEM_MC_END();
13298 break;
13299
13300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13301 }
13302}
13303
13304
13305/**
13306 * @opcode 0xe2
13307 */
13308FNIEMOP_DEF(iemOp_loop_Jb)
13309{
13310 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13311 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13312 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13313
13314 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13315 * using the 32-bit operand size override. How can that be restarted? See
13316 * weird pseudo code in intel manual. */
13317
13318 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13319 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13320 * the loop causes guest crashes, but when logging it's nice to skip a few million
13321 * lines of useless output. */
13322#if defined(LOG_ENABLED)
13323 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13324 switch (pVCpu->iem.s.enmEffAddrMode)
13325 {
13326 case IEMMODE_16BIT:
13327 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13329 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13330 IEM_MC_ADVANCE_RIP_AND_FINISH();
13331 IEM_MC_END();
13332 break;
13333
13334 case IEMMODE_32BIT:
13335 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13337 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13338 IEM_MC_ADVANCE_RIP_AND_FINISH();
13339 IEM_MC_END();
13340 break;
13341
13342 case IEMMODE_64BIT:
13343 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13345 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13346 IEM_MC_ADVANCE_RIP_AND_FINISH();
13347 IEM_MC_END();
13348 break;
13349
13350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13351 }
13352#endif
13353
13354 switch (pVCpu->iem.s.enmEffAddrMode)
13355 {
13356 case IEMMODE_16BIT:
13357 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13359 IEM_MC_IF_CX_IS_NOT_ONE() {
13360 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13361 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13362 } IEM_MC_ELSE() {
13363 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13364 IEM_MC_ADVANCE_RIP_AND_FINISH();
13365 } IEM_MC_ENDIF();
13366 IEM_MC_END();
13367 break;
13368
13369 case IEMMODE_32BIT:
13370 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13372 IEM_MC_IF_ECX_IS_NOT_ONE() {
13373 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13374 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13375 } IEM_MC_ELSE() {
13376 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13377 IEM_MC_ADVANCE_RIP_AND_FINISH();
13378 } IEM_MC_ENDIF();
13379 IEM_MC_END();
13380 break;
13381
13382 case IEMMODE_64BIT:
13383 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13385 IEM_MC_IF_RCX_IS_NOT_ONE() {
13386 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13387 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13388 } IEM_MC_ELSE() {
13389 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13390 IEM_MC_ADVANCE_RIP_AND_FINISH();
13391 } IEM_MC_ENDIF();
13392 IEM_MC_END();
13393 break;
13394
13395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13396 }
13397}
13398
13399
13400/**
13401 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative jump when the counter
 * register (selected by the effective address size) is zero.  The counter
 * is only tested, never modified, and EFLAGS are untouched.  Note the
 * inverted micro-code condition: the IF branch (counter non-zero) falls
 * through, the ELSE branch jumps.
13402 */
13403FNIEMOP_DEF(iemOp_jecxz_Jb)
13404{
13405    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13406    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13407    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13408
13409    switch (pVCpu->iem.s.enmEffAddrMode)
13410    {
13411        case IEMMODE_16BIT:
13412            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13413            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13414            IEM_MC_IF_CX_IS_NZ() {
13415                IEM_MC_ADVANCE_RIP_AND_FINISH();
13416            } IEM_MC_ELSE() {
13417                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13418            } IEM_MC_ENDIF();
13419            IEM_MC_END();
13420            break;
13421
13422        case IEMMODE_32BIT:
13423            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13424            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13425            IEM_MC_IF_ECX_IS_NZ() {
13426                IEM_MC_ADVANCE_RIP_AND_FINISH();
13427            } IEM_MC_ELSE() {
13428                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13429            } IEM_MC_ENDIF();
13430            IEM_MC_END();
13431            break;
13432
13433        case IEMMODE_64BIT:
13434            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13435            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13436            IEM_MC_IF_RCX_IS_NZ() {
13437                IEM_MC_ADVANCE_RIP_AND_FINISH();
13438            } IEM_MC_ELSE() {
13439                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13440            } IEM_MC_ENDIF();
13441            IEM_MC_END();
13442            break;
13443
13444        IEM_NOT_REACHED_DEFAULT_CASE_RET();
13445    }
13446}
13447
13448
13449/**
13450 * @opcode 0xe4
13451 * @opfltest iopl
 *
 * IN AL,Ib: read one byte from the immediate I/O port into AL.  Defers to
 * iemCImpl_in (width 1, immediate-port flag 0x80 combined with the effective
 * address mode) and is flagged as I/O that may cause a VM-exit; the native
 * recompiler is told only the xAX guest register is dirtied.
13452 */
13453FNIEMOP_DEF(iemOp_in_AL_Ib)
13454{
13455    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13456    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13457    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13458    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13459                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13460}
13461
13462
13463/**
13464 * @opcode 0xe5
13465 * @opfltest iopl
 *
 * IN eAX,Ib: read a word or dword (2 or 4 bytes, chosen by the effective
 * operand size) from the immediate I/O port into AX/EAX.  Same deferral and
 * flags as the AL,Ib form.
13466 */
13467FNIEMOP_DEF(iemOp_in_eAX_Ib)
13468{
13469    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13470    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13471    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13472    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13473                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13474                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13475}
13476
13477
13478/**
13479 * @opcode 0xe6
13480 * @opfltest iopl
 *
 * OUT Ib,AL: write AL to the immediate I/O port.  Defers to iemCImpl_out
 * (width 1); no guest registers are modified, hence the 0 dirty-register
 * mask.
13481 */
13482FNIEMOP_DEF(iemOp_out_Ib_AL)
13483{
13484    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13485    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13486    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13487    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13488                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13489}
13490
13491
13492/**
13493 * @opcode 0xe7
13494 * @opfltest iopl
 *
 * OUT Ib,eAX: write AX or EAX (2 or 4 bytes, chosen by the effective
 * operand size) to the immediate I/O port.  Same deferral and flags as the
 * Ib,AL form.
13495 */
13496FNIEMOP_DEF(iemOp_out_Ib_eAX)
13497{
13498    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13499    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13500    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13501    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13502                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13503                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13504}
13505
13506
13507/**
13508 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  Defers to the operand-size specific C
 * implementation (iemCImpl_call_rel_16/32/64), flagged as a relative branch
 * that touches the stack.  In 64-bit mode the displacement is a 32-bit
 * immediate sign-extended to 64 bits.
13509 */
13510FNIEMOP_DEF(iemOp_call_Jv)
13511{
13512    IEMOP_MNEMONIC(call_Jv, "call Jv");
13513    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13514    switch (pVCpu->iem.s.enmEffOpSize)
13515    {
13516        case IEMMODE_16BIT:
13517        {
13518            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13519            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13520                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
13521        }
13522
13523        case IEMMODE_32BIT:
13524        {
13525            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13526            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13527                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
13528        }
13529
13530        case IEMMODE_64BIT:
13531        {
13532            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13533            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13534                                        iemCImpl_call_rel_64, u64Imm);
13535        }
13536
13537        IEM_NOT_REACHED_DEFAULT_CASE_RET();
13538    }
13539}
13540
13541
13542/**
13543 * @opcode 0xe9
 *
 * JMP Jv: near relative jump with a 16- or 32-bit displacement.  The 64-bit
 * and 32-bit cases share the signed 32-bit displacement path; only the
 * 16-bit operand size uses a 16-bit displacement.
13544 */
13545FNIEMOP_DEF(iemOp_jmp_Jv)
13546{
13547    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13548    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13549    switch (pVCpu->iem.s.enmEffOpSize)
13550    {
13551        case IEMMODE_16BIT:
13552            IEM_MC_BEGIN(0, 0, 0, 0);
13553            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13554            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13555            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13556            IEM_MC_END();
13557            break;
13558
13559        case IEMMODE_64BIT:
13560        case IEMMODE_32BIT:
13561            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13562            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13563            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13564            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13565            IEM_MC_END();
13566            break;
13567
13568        IEM_NOT_REACHED_DEFAULT_CASE_RET();
13569    }
13570}
13571
13572
13573/**
13574 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an immediate selector:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The offset (16 or 32 bits,
 * zero-extended to 32) is decoded first, then the selector, and both are
 * handed to iemCImpl_FarJmp.  Flagged as a direct far branch that may change
 * mode, RFLAGS, and cause a VM-exit; UINT64_MAX marks all guest registers as
 * potentially dirtied.
13575 */
13576FNIEMOP_DEF(iemOp_jmp_Ap)
13577{
13578    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13579    IEMOP_HLP_NO_64BIT();
13580
13581    /* Decode the far pointer address and pass it on to the far call C implementation. */
13582    uint32_t off32Seg;
13583    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13584        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13585    else
13586        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13587    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13588    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13589    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13590                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13591                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13592    /** @todo make task-switches, ring-switches, ++ return non-zero status */
13593}
13594
13595
13596/**
13597 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with a signed 8-bit displacement.
13598 */
13599FNIEMOP_DEF(iemOp_jmp_Jb)
13600{
13601    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13602    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13603    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13604
13605    IEM_MC_BEGIN(0, 0, 0, 0);
13606    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13607    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13608    IEM_MC_END();
13609}
13610
13611
13612/**
13613 * @opcode 0xec
13614 * @opfltest iopl
 *
 * IN AL,DX: read one byte from the I/O port in DX into AL.  Defers to
 * iemCImpl_in_eAX_DX (width 1); only xAX is reported as dirtied.
13615 */
13616FNIEMOP_DEF(iemOp_in_AL_DX)
13617{
13618    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13619    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13620    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13621                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13622                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13623}
13624
13625
13626/**
13627 * @opcode 0xed
13628 * @opfltest iopl
 *
 * IN eAX,DX: read a word or dword (2 or 4 bytes, chosen by the effective
 * operand size) from the I/O port in DX into AX/EAX.
13629 */
13630FNIEMOP_DEF(iemOp_in_eAX_DX)
13631{
13632    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13633    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13634    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13635                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13636                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13637                                pVCpu->iem.s.enmEffAddrMode);
13638}
13639
13640
13641/**
13642 * @opcode 0xee
13643 * @opfltest iopl
 *
 * OUT DX,AL: write AL to the I/O port in DX.  Defers to iemCImpl_out_DX_eAX
 * (width 1); no guest registers are modified.
13644 */
13645FNIEMOP_DEF(iemOp_out_DX_AL)
13646{
13647    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13648    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13649    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13650                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13651}
13652
13653
13654/**
13655 * @opcode 0xef
13656 * @opfltest iopl
 *
 * OUT DX,eAX: write AX or EAX (2 or 4 bytes, chosen by the effective
 * operand size) to the I/O port in DX.
13657 */
13658FNIEMOP_DEF(iemOp_out_DX_eAX)
13659{
13660    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13661    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13662    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13663                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13664                                pVCpu->iem.s.enmEffAddrMode);
13665}
13666
13667
13668/**
13669 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK in the prefix state and dispatches
 * the following opcode byte through the one-byte opcode table.
13670 */
13671FNIEMOP_DEF(iemOp_lock)
13672{
13673    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13674    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13675
    /* Continue decoding with the prefixed instruction. */
13676    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13677    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13678}
13679
13680
13681/**
13682 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB through iemCImpl_int (X86_XCPT_DB, IEMINT_INT1).
 * Flagged as an indirect far branch with far-stack usage, potential mode and
 * RFLAGS changes, a VM-exit, and translation-block termination.
13683 */
13684FNIEMOP_DEF(iemOp_int1)
13685{
13686    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13687    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
13688     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13689     * LOADALL memo.  Needs some testing. */
13690    IEMOP_HLP_MIN_386();
13691    /** @todo testcase! */
13692    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13693                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13694                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13695}
13696
13697
13698/**
13699 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: replaces any pending REPZ prefix with REPNZ, selects
 * index 3 in the 4-entry opcode tables, and dispatches the next opcode byte.
13700 */
13701FNIEMOP_DEF(iemOp_repne)
13702{
13703    /* This overrides any previous REPE prefix. */
13704    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13705    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13706    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13707
13708    /* For the 4 entry opcode tables, REPNZ overrides any previous
13709       REPZ and operand size prefixes. */
13710    pVCpu->iem.s.idxPrefix = 3;
13711
    /* Continue decoding with the prefixed instruction. */
13712    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13713    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13714}
13715
13716
13717/**
13718 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix: replaces any pending REPNZ prefix with REPZ, selects
 * index 2 in the 4-entry opcode tables, and dispatches the next opcode byte.
13719 */
13720FNIEMOP_DEF(iemOp_repe)
13721{
13722    /* This overrides any previous REPNE prefix. */
13723    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13724    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13725    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13726
13727    /* For the 4 entry opcode tables, REPZ overrides any previous
13728       REPNZ and operand size prefixes. */
13729    pVCpu->iem.s.idxPrefix = 2;
13730
    /* Continue decoding with the prefixed instruction. */
13731    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13732    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13733}
13734
13735
13736/**
13737 * @opcode 0xf4
 *
 * HLT: defers to iemCImpl_hlt; terminates the current translation block and
 * may cause a VM-exit.
13738 */
13739FNIEMOP_DEF(iemOp_hlt)
13740{
13741    IEMOP_MNEMONIC(hlt, "hlt");
13742    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13743    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13744}
13745
13746
13747/**
13748 * @opcode 0xf5
13749 * @opflmodify cf
 *
 * CMC: complement the carry flag; no other flags or registers are touched.
13750 */
13751FNIEMOP_DEF(iemOp_cmc)
13752{
13753    IEMOP_MNEMONIC(cmc, "cmc");
13754    IEM_MC_BEGIN(0, 0, 0, 0);
13755    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13756    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13757    IEM_MC_ADVANCE_RIP_AND_FINISH();
13758    IEM_MC_END();
13759}
13760
13761
13762/**
13763 * Body of 'inc/dec/not/neg Eb'.
 *
 * Register operands call @a a_fnNormalU8 directly on the referenced GREG.
 * Memory operands are mapped read-write; when a LOCK prefix is present (and
 * not disregarded via IEM_F_X86_DISREGARD_LOCK) the mapping is atomic and
 * @a a_fnLockedU8 is used instead.  EFLAGS are fetched before and committed
 * after the helper call in the memory paths.
13764 */
13765#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13766    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13767    { \
13768        /* register access */ \
13769        IEM_MC_BEGIN(2, 0, 0, 0); \
13770        IEMOP_HLP_DONE_DECODING(); \
13771        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
13772        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
13773        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13774        IEM_MC_REF_EFLAGS(pEFlags); \
13775        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13776        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13777        IEM_MC_END(); \
13778    } \
13779    else \
13780    { \
13781        /* memory access. */ \
13782        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13783        { \
13784            IEM_MC_BEGIN(2, 2, 0, 0); \
13785            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
13786            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13787            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13788            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13789            \
13790            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13791            IEMOP_HLP_DONE_DECODING(); \
13792            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13793            IEM_MC_FETCH_EFLAGS(EFlags); \
13794            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13795            \
13796            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13797            IEM_MC_COMMIT_EFLAGS(EFlags); \
13798            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13799            IEM_MC_END(); \
13800        } \
13801        else \
13802        { \
13803            IEM_MC_BEGIN(2, 2, 0, 0); \
13804            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
13805            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13806            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13807            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13808            \
13809            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13810            IEMOP_HLP_DONE_DECODING(); \
13811            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13812            IEM_MC_FETCH_EFLAGS(EFlags); \
13813            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13814            \
13815            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13816            IEM_MC_COMMIT_EFLAGS(EFlags); \
13817            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13818            IEM_MC_END(); \
13819        } \
13820    } \
13821    (void)0
13822
13823
13824/**
13825 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register operand path (all three operand sizes, with the
 * 32-bit case clearing the high half of the 64-bit GREG) and the unlocked
 * memory path.  NOTE: this macro deliberately ends inside an open
 * 'else {' branch for the locked case; it must be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that branch and the closing
 * braces.
13826 */
13827#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13828    if (IEM_IS_MODRM_REG_MODE(bRm)) \
13829    { \
13830        /* \
13831         * Register target \
13832         */ \
13833        switch (pVCpu->iem.s.enmEffOpSize) \
13834        { \
13835            case IEMMODE_16BIT: \
13836                IEM_MC_BEGIN(2, 0, 0, 0); \
13837                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13838                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
13839                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
13840                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13841                IEM_MC_REF_EFLAGS(pEFlags); \
13842                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13843                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13844                IEM_MC_END(); \
13845                break; \
13846            \
13847            case IEMMODE_32BIT: \
13848                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13849                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13850                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
13851                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
13852                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13853                IEM_MC_REF_EFLAGS(pEFlags); \
13854                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13855                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13856                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13857                IEM_MC_END(); \
13858                break; \
13859            \
13860            case IEMMODE_64BIT: \
13861                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13862                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13863                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
13864                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
13865                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13866                IEM_MC_REF_EFLAGS(pEFlags); \
13867                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13868                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13869                IEM_MC_END(); \
13870                break; \
13871            \
13872            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13873        } \
13874    } \
13875    else \
13876    { \
13877        /* \
13878         * Memory target. \
13879         */ \
13880        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13881        { \
13882            switch (pVCpu->iem.s.enmEffOpSize) \
13883            { \
13884                case IEMMODE_16BIT: \
13885                    IEM_MC_BEGIN(2, 3, 0, 0); \
13886                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
13887                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13888                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13889                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13890                    \
13891                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13892                    IEMOP_HLP_DONE_DECODING(); \
13893                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13894                    IEM_MC_FETCH_EFLAGS(EFlags); \
13895                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13896                    \
13897                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13898                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13899                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13900                    IEM_MC_END(); \
13901                    break; \
13902                \
13903                case IEMMODE_32BIT: \
13904                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13905                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
13906                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13907                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13908                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13909                    \
13910                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13911                    IEMOP_HLP_DONE_DECODING(); \
13912                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13913                    IEM_MC_FETCH_EFLAGS(EFlags); \
13914                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13915                    \
13916                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13917                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13918                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13919                    IEM_MC_END(); \
13920                    break; \
13921                \
13922                case IEMMODE_64BIT: \
13923                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13924                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
13925                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13926                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13927                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13928                    \
13929                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13930                    IEMOP_HLP_DONE_DECODING(); \
13931                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13932                    IEM_MC_FETCH_EFLAGS(EFlags); \
13933                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13934                    \
13935                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13936                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13937                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13938                    IEM_MC_END(); \
13939                    break; \
13940                \
13941                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13942            } \
13943        } \
13944        else \
13945        { \
13946            (void)0
13947
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev: completes the 'else' branch
 * that macro leaves open, using atomic memory mappings and the locked
 * helpers, and supplies the closing braces.
 */
13948#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
13949            switch (pVCpu->iem.s.enmEffOpSize) \
13950            { \
13951                case IEMMODE_16BIT: \
13952                    IEM_MC_BEGIN(2, 3, 0, 0); \
13953                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
13954                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13955                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13956                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13957                    \
13958                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13959                    IEMOP_HLP_DONE_DECODING(); \
13960                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13961                    IEM_MC_FETCH_EFLAGS(EFlags); \
13962                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
13963                    \
13964                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13965                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13966                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13967                    IEM_MC_END(); \
13968                    break; \
13969                \
13970                case IEMMODE_32BIT: \
13971                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13972                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
13973                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13974                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13975                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13976                    \
13977                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13978                    IEMOP_HLP_DONE_DECODING(); \
13979                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13980                    IEM_MC_FETCH_EFLAGS(EFlags); \
13981                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
13982                    \
13983                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13984                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13985                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13986                    IEM_MC_END(); \
13987                    break; \
13988                \
13989                case IEMMODE_64BIT: \
13990                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13991                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
13992                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
13993                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
13994                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
13995                    \
13996                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13997                    IEMOP_HLP_DONE_DECODING(); \
13998                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13999                    IEM_MC_FETCH_EFLAGS(EFlags); \
14000                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
14001                    \
14002                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14003                    IEM_MC_COMMIT_EFLAGS(EFlags); \
14004                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14005                    IEM_MC_END(); \
14006                    break; \
14007                \
14008                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14009            } \
14010        } \
14011    } \
14012    (void)0
14013
14014
14015/**
14016 * @opmaps grp3_f6
14017 * @opcode /0
14018 * @opflclass logical
14019 * @todo also /1
 *
 * TEST Eb,Ib: AND the destination with the immediate for flags only; the
 * destination is never written, which is why the memory path maps it
 * read-only.  Note the immediate byte follows the ModR/M displacement, so
 * the effective address is calculated with a 1-byte immediate adjustment
 * before the immediate is fetched.
14020 */
14021FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
14022{
14023    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14024    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14025
14026    if (IEM_IS_MODRM_REG_MODE(bRm))
14027    {
14028        /* register access */
14029        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14030        IEM_MC_BEGIN(3, 0, 0, 0);
14031        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14032        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
14033        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
14034        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
14035        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14036        IEM_MC_REF_EFLAGS(pEFlags);
14037        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14038        IEM_MC_ADVANCE_RIP_AND_FINISH();
14039        IEM_MC_END();
14040    }
14041    else
14042    {
14043        /* memory access. */
14044        IEM_MC_BEGIN(3, 3, 0, 0);
14045        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14046        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
14047
14048        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14049        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14050
14051        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14052        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
14053        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14054
14055        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
14056        IEM_MC_ARG_LOCAL_EFLAGS(  pEFlags, EFlags, 2);
14057        IEM_MC_FETCH_EFLAGS(EFlags);
14058        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14059
14060        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14061        IEM_MC_COMMIT_EFLAGS(EFlags);
14062        IEM_MC_ADVANCE_RIP_AND_FINISH();
14063        IEM_MC_END();
14064    }
14065}
14066
14067
14068/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb):
   fetches the 8-bit operand (register or memory), invokes the selected
   PFNIEMAIMPLMULDIVU8 helper on AX, and raises #DE when the helper returns
   non-zero (division error). */
14069#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
14070    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
14071    if (IEM_IS_MODRM_REG_MODE(bRm)) \
14072    { \
14073        /* register access */ \
14074        IEM_MC_BEGIN(3, 1, 0, 0); \
14075        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14076        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
14077        IEM_MC_ARG(uint8_t,         u8Value, 1); \
14078        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
14079        IEM_MC_LOCAL(int32_t,       rc); \
14080        \
14081        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14082        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14083        IEM_MC_REF_EFLAGS(pEFlags); \
14084        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
14085        IEM_MC_IF_LOCAL_IS_Z(rc) { \
14086            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14087        } IEM_MC_ELSE() { \
14088            IEM_MC_RAISE_DIVIDE_ERROR(); \
14089        } IEM_MC_ENDIF(); \
14090        \
14091        IEM_MC_END(); \
14092    } \
14093    else \
14094    { \
14095        /* memory access. */ \
14096        IEM_MC_BEGIN(3, 2, 0, 0); \
14097        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
14098        IEM_MC_ARG(uint8_t,         u8Value, 1); \
14099        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
14100        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
14101        IEM_MC_LOCAL(int32_t,       rc); \
14102        \
14103        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14104        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14105        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14106        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14107        IEM_MC_REF_EFLAGS(pEFlags); \
14108        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
14109        IEM_MC_IF_LOCAL_IS_Z(rc) { \
14110            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14111        } IEM_MC_ELSE() { \
14112            IEM_MC_RAISE_DIVIDE_ERROR(); \
14113        } IEM_MC_ENDIF(); \
14114        \
14115        IEM_MC_END(); \
14116    } (void)0
14117
14118
14119/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev):
   fetches the 16/32/64-bit operand (register or memory), invokes the
   size-matched helper from the PCIEMOPMULDIVSIZES table on the xAX:xDX pair,
   and raises #DE on a non-zero helper return.  The 32-bit paths explicitly
   clear the high halves of RAX/RDX on success. */
14120#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
14121    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
14122    if (IEM_IS_MODRM_REG_MODE(bRm)) \
14123    { \
14124        /* register access */ \
14125        switch (pVCpu->iem.s.enmEffOpSize) \
14126        { \
14127            case IEMMODE_16BIT: \
14128                IEM_MC_BEGIN(4, 1, 0, 0); \
14129                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14130                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
14131                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
14132                IEM_MC_ARG(uint16_t,        u16Value, 2); \
14133                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14134                IEM_MC_LOCAL(int32_t,       rc); \
14135                \
14136                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14137                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14138                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14139                IEM_MC_REF_EFLAGS(pEFlags); \
14140                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14141                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14142                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14143                } IEM_MC_ELSE() { \
14144                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14145                } IEM_MC_ENDIF(); \
14146                \
14147                IEM_MC_END(); \
14148                break; \
14149            \
14150            case IEMMODE_32BIT: \
14151                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
14152                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14153                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
14154                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
14155                IEM_MC_ARG(uint32_t,        u32Value, 2); \
14156                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14157                IEM_MC_LOCAL(int32_t,       rc); \
14158                \
14159                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14160                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14161                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14162                IEM_MC_REF_EFLAGS(pEFlags); \
14163                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14164                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14165                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14166                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14167                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14168                } IEM_MC_ELSE() { \
14169                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14170                } IEM_MC_ENDIF(); \
14171                \
14172                IEM_MC_END(); \
14173                break; \
14174            \
14175            case IEMMODE_64BIT: \
14176                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
14177                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14178                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
14179                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
14180                IEM_MC_ARG(uint64_t,        u64Value, 2); \
14181                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14182                IEM_MC_LOCAL(int32_t,       rc); \
14183                \
14184                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14185                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14186                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14187                IEM_MC_REF_EFLAGS(pEFlags); \
14188                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14189                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14190                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14191                } IEM_MC_ELSE() { \
14192                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14193                } IEM_MC_ENDIF(); \
14194                \
14195                IEM_MC_END(); \
14196                break; \
14197            \
14198            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14199        } \
14200    } \
14201    else \
14202    { \
14203        /* memory access. */ \
14204        switch (pVCpu->iem.s.enmEffOpSize) \
14205        { \
14206            case IEMMODE_16BIT: \
14207                IEM_MC_BEGIN(4, 2, 0, 0); \
14208                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
14209                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
14210                IEM_MC_ARG(uint16_t,        u16Value, 2); \
14211                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14212                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
14213                IEM_MC_LOCAL(int32_t,       rc); \
14214                \
14215                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14216                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14217                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14218                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14219                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14220                IEM_MC_REF_EFLAGS(pEFlags); \
14221                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14222                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14223                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14224                } IEM_MC_ELSE() { \
14225                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14226                } IEM_MC_ENDIF(); \
14227                \
14228                IEM_MC_END(); \
14229                break; \
14230            \
14231            case IEMMODE_32BIT: \
14232                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
14233                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
14234                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
14235                IEM_MC_ARG(uint32_t,        u32Value, 2); \
14236                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14237                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
14238                IEM_MC_LOCAL(int32_t,       rc); \
14239                \
14240                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14241                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14242                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14243                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14244                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14245                IEM_MC_REF_EFLAGS(pEFlags); \
14246                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14247                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14248                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14249                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14250                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14251                } IEM_MC_ELSE() { \
14252                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14253                } IEM_MC_ENDIF(); \
14254                \
14255                IEM_MC_END(); \
14256                break; \
14257            \
14258            case IEMMODE_64BIT: \
14259                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
14260                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
14261                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
14262                IEM_MC_ARG(uint64_t,        u64Value, 2); \
14263                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
14264                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
14265                IEM_MC_LOCAL(int32_t,       rc); \
14266                \
14267                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14268                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14269                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14270                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14271                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14272                IEM_MC_REF_EFLAGS(pEFlags); \
14273                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14274                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14275                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14276                } IEM_MC_ELSE() { \
14277                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14278                } IEM_MC_ENDIF(); \
14279                \
14280                IEM_MC_END(); \
14281                break; \
14282            \
14283            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14284        } \
14285    } (void)0
14286
14287
14288/**
14289 * @opmaps grp3_f6
14290 * @opcode /2
14291 * @opflclass unchanged
 *
 * NOT Eb: bitwise complement of an 8-bit register or memory operand,
 * expanded via IEMOP_BODY_UNARY_Eb with the plain and locked not_u8
 * helpers.
14292 */
14293FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14294{
14295/** @todo does not modify EFLAGS. */
14296    IEMOP_MNEMONIC(not_Eb, "not Eb");
14297    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14298}
14299
14300
14301/**
14302 * @opmaps grp3_f6
14303 * @opcode /3
14304 * @opflclass arithmetic
 *
 * NEG Eb: two's complement negation of an 8-bit register or memory operand,
 * expanded via IEMOP_BODY_UNARY_Eb with the plain and locked neg_u8
 * helpers.
14305 */
14306FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14307{
    /* Fixed typo'd stats/mnemonic identifier: was 'net_Eb', the instruction
       is NEG (consistent with the other grp3 mnemonics in this file). */
14308    IEMOP_MNEMONIC(neg_Eb, "neg Eb");
14309    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14310}
14311
14312
14313/**
14314 * @opcode 0xf6
 *
 * Group 3 dispatcher for 8-bit operands: routes on the ModR/M reg field to
 * test (/0 and /1), not (/2), neg (/3), mul (/4), imul (/5), div (/6) and
 * idiv (/7).
14315 */
14316FNIEMOP_DEF(iemOp_Grp3_Eb)
14317{
14318    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14319    switch (IEM_GET_MODRM_REG_8(bRm))
14320    {
14321        case 0:
14322            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        /* /1 is decoded the same as /0 (TEST); see the @todo on iemOp_grp3_test_Eb. */
14323        case 1:
14324            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14325        case 2:
14326            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14327        case 3:
14328            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14329        case 4:
14330        {
14331            /**
14332             * @opdone
14333             * @opmaps grp3_f6
14334             * @opcode /4
14335             * @opflclass multiply
14336             */
14337            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14338            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14339            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14340            break;
14341        }
14342        case 5:
14343        {
14344            /**
14345             * @opdone
14346             * @opmaps grp3_f6
14347             * @opcode /5
14348             * @opflclass multiply
14349             */
14350            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14351            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14352            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14353            break;
14354        }
14355        case 6:
14356        {
14357            /**
14358             * @opdone
14359             * @opmaps grp3_f6
14360             * @opcode /6
14361             * @opflclass division
14362             */
14363            IEMOP_MNEMONIC(div_Eb, "div Eb");
14364            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14365            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14366            break;
14367        }
14368        case 7:
14369        {
14370            /**
14371             * @opdone
14372             * @opmaps grp3_f6
14373             * @opcode /7
14374             * @opflclass division
14375             */
14376            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14377            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14378            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14379            break;
14380        }
14381        IEM_NOT_REACHED_DEFAULT_CASE_RET();
14382    }
14383}
14384
14385
14386/**
14387 * @opmaps grp3_f7
14388 * @opcode /0
14389 * @opflclass logical
14390 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                /* The immediate follows the ModR/M byte; fetch it before declaring args. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* imm32 sign-extended to 64 bits, as per the instruction encoding. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Third arg = trailing immediate size (2 bytes here; cf. 4 for
                   the imm32 cases below) - NOTE(review): presumably so
                   RIP-relative/displacement decoding accounts for it; confirm
                   against IEM_MC_CALC_RM_EFF_ADDR. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* TEST only reads the destination, so map it read-only. */
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4: the 64-bit form carries a sign-extended imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14525
14526
14527/**
14528 * @opmaps grp3_f7
14529 * @opcode /2
14530 * @opflclass unchanged
14531 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* Unlocked body first, then the LOCK-prefix variants; both macros are
       defined elsewhere in this file. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14539
14540
14541/**
14542 * @opmaps grp3_f7
14543 * @opcode /3
14544 * @opflclass arithmetic
14545 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* Unlocked body first, then the LOCK-prefix variants. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14552
14553
14554/**
14555 * @opmaps grp3_f7
14556 * @opcode /4
14557 * @opflclass multiply
14558 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    /* SF/ZF/AF/PF are architecturally undefined after MUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Implementation selected per target-CPU EFLAGS behavior. */
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14565
14566
14567/**
14568 * @opmaps grp3_f7
14569 * @opcode /5
14570 * @opflclass multiply
14571 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14578
14579
14580/**
14581 * @opmaps grp3_f7
14582 * @opcode /6
14583 * @opflclass division
14584 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    /* All six status flags are architecturally undefined after DIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14591
14592
14593/**
14594 * @opmaps grp3_f7
14595 * @opcode /7
14596 * @opflclass division
14597 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    /* All six status flags are architecturally undefined after IDIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14604
14605
14606/**
14607 * @opcode 0xf7
14608 */
14609FNIEMOP_DEF(iemOp_Grp3_Ev)
14610{
14611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14612 switch (IEM_GET_MODRM_REG_8(bRm))
14613 {
14614 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14615 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14616 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14617 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14618 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14619 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14620 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14621 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14623 }
14624}
14625
14626
14627/**
14628 * @opcode 0xf8
14629 * @opflmodify cf
14630 * @opflclear cf
14631 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* CLC: clear the carry flag only. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14641
14642
14643/**
14644 * @opcode 0xf9
14645 * @opflmodify cf
14646 * @opflset cf
14647 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STC: set the carry flag only. */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14657
14658
14659/**
14660 * @opcode 0xfa
14661 * @opfltest iopl,vm
14662 * @opflmodify if,vif
14663 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation: touches RFLAGS, may cause a VM-exit,
       and IRQs must be checked before the instruction takes effect. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
14670
14671
14672/**
14673 * @opcode 0xfb
14674 * @opfltest iopl,vm
14675 * @opflmodify if,vif
14676 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation: touches RFLAGS, checks IRQs after the
       instruction, may VM-exit, and sets up interrupt inhibition shadowing
       for the next instruction. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14684
14685
14686/**
14687 * @opcode 0xfc
14688 * @opflmodify df
14689 * @opflclear df
14690 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* CLD: clear the direction flag only. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14700
14701
14702/**
14703 * @opcode 0xfd
14704 * @opflmodify df
14705 * @opflset df
14706 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STD: set the direction flag only. */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14716
14717
14718/**
14719 * @opmaps grp4
14720 * @opcode /0
14721 * @opflclass incdec
14722 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    /* Shared unary Eb body; locked variant used for LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14728
14729
14730/**
14731 * @opmaps grp4
14732 * @opcode /1
14733 * @opflclass incdec
14734 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    /* Shared unary Eb body; locked variant used for LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14740
14741
14742/**
14743 * @opcode 0xfe
14744 */
14745FNIEMOP_DEF(iemOp_Grp4)
14746{
14747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14748 switch (IEM_GET_MODRM_REG_8(bRm))
14749 {
14750 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14751 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14752 default:
14753 /** @todo is the eff-addr decoded? */
14754 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14755 IEMOP_RAISE_INVALID_OPCODE_RET();
14756 }
14757}
14758
14759/**
14760 * @opmaps grp5
14761 * @opcode /0
14762 * @opflclass incdec
14763 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* Unlocked body first, then the LOCK-prefix variants. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14770
14771
14772/**
14773 * @opmaps grp5
14774 * @opcode /1
14775 * @opflclass incdec
14776 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* Unlocked body first, then the LOCK-prefix variants. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14783
14784
14785/**
14786 * Opcode 0xff /2.
14787 * @param bRm The RM byte.
14788 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Indirect branch that also pushes a return address. */
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is read from memory. (Original comment said "register"
           here - copy/paste slip; this is the mem-mode branch.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14871
/**
 * Common body for grp5 far call/jmp via a memory operand (callf/jmpf Ep).
 *
 * Loads a far pointer (selector:offset) from memory and defers to the given
 * C implementation.  Register operands are invalid (\#UD).  Offset size
 * follows the effective operand size (16/32/64-bit); the selector is always
 * the 16 bits following the offset.
 *
 * @param a_bRm         The ModR/M byte.
 * @param a_fnCImpl     The C implementation to defer to.
 * @param a_fCImplExtra Additional IEM_CIMPL_F_XXX flags (e.g. BRANCH_STACK
 *                      for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14940
14941
14942/**
14943 * Opcode 0xff /3.
14944 * @param bRm The RM byte.
14945 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Far call: shares the far-pointer body with jmpf, plus the stack flag
       since CALL pushes a return address. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14951
14952
14953/**
14954 * Opcode 0xff /4.
14955 * @param bRm The RM byte.
14956 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* No stack involvement - a plain RIP update suffices for JMP. */
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15039
15040
15041/**
15042 * Opcode 0xff /5.
15043 * @param bRm The RM byte.
15044 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Far jump: shares the far-pointer body with callf; no extra CIMPL flags
       since JMP does not touch the stack. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
15050
15051
15052/**
15053 * Opcode 0xff /6.
15054 * @param bRm The RM byte.
15055 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* NOT_64BIT: with the default-64-bit op size applied above, a
               32-bit operand size cannot occur in 64-bit mode. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15107
15108
15109/**
15110 * @opcode 0xff
15111 */
15112FNIEMOP_DEF(iemOp_Grp5)
15113{
15114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15115 switch (IEM_GET_MODRM_REG_8(bRm))
15116 {
15117 case 0:
15118 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15119 case 1:
15120 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15121 case 2:
15122 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15123 case 3:
15124 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15125 case 4:
15126 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15127 case 5:
15128 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15129 case 6:
15130 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15131 case 7:
15132 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15133 IEMOP_RAISE_INVALID_OPCODE_RET();
15134 }
15135 AssertFailedReturn(VERR_IEM_IPE_3);
15136}
15137
15138
15139
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 * Declared extern at the top of this file so it can be forward referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15207
15208
15209/** @} */
15210
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette