VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h

/* $Id: IEMAllInstTwoByte0f.cpp.h 104439 2024-04-26 10:30:18Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
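

/*
 * Illustrative sketch, not part of the original file: a typical opcode
 * handler just forwards to the common worker above together with the
 * arithmetic helper for that instruction.  The mnemonic and helper names
 * below are made up for illustration; the real handlers appear further
 * down in this file.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
{
    IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxxx_u64);
}
#endif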


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
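

/*
 * Note added for clarity, not part of the original file: the three MMX
 * workers above are structurally identical and differ only in the CPUID
 * feature check that gates decoding (fMmx, fSse/fAmdMmxExts, and fSse2
 * respectively, as passed to IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX*).
 */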


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fMemNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)
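

/*
 * Illustrative sketch, not part of the original file: an opcode body would
 * invoke the macro above with the instruction name (so the RT_CONCAT3
 * expansion resolves to the iemNativeEmit_<ins>_rr_u128 / _rv_u128 emitter
 * pair), the C fallback helper, and the host architectures that have native
 * emitters.  The names below are made up for illustration.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
{
    IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx");
    SSE2_OPT_BODY_FullFull_To_Full(pxxx, iemAImpl_pxxx_u128,
                                   RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64,
                                   RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
#endif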


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read only the low 64 bits or the
 * full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read only the low 64 bits or the
 * full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
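

/*
 * Note added for clarity, not part of the original file: unlike the integer
 * workers earlier, the FP workers compute the result into the stack local
 * SseRes and only commit it via IEM_MC_STORE_XREG_XMM after the deferred
 * SIMD FP exception check, so an exception raised by the helper leaves the
 * destination register unmodified.
 */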


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
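

/*
 * Note added for clarity, not part of the original file: the register form
 * defers entirely to a C implementation helper; the second parameter of
 * IEM_MC_DEFER_TO_CIMPL_2_RET is presumably the guest-register shadow mask,
 * telling the native recompiler which register the helper may modify.  Only
 * the memory form needs an MC block, to compute the effective address.
 */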


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
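

/*
 * Illustrative note, not part of the original file: the reg field of the
 * ModR/M byte selects the group member.  E.g. for 0F 00 D8 the reg field is
 * (0xd8 >> 3) & 7 = 3, so g_apfnGroup6 dispatches to iemOp_Grp6_ltr.
 */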


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HM, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HM, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                iemCImpl_rdtscp);
}
1654
1655
1656/**
1657 * Group 7 jump table, memory variant.
1658 */
1659IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1660{
1661 iemOp_Grp7_sgdt,
1662 iemOp_Grp7_sidt,
1663 iemOp_Grp7_lgdt,
1664 iemOp_Grp7_lidt,
1665 iemOp_Grp7_smsw,
1666 iemOp_InvalidWithRM,
1667 iemOp_Grp7_lmsw,
1668 iemOp_Grp7_invlpg
1669};
1670
1671
1672/** Opcode 0x0f 0x01. */
1673FNIEMOP_DEF(iemOp_Grp7)
1674{
1675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1676 if (IEM_IS_MODRM_MEM_MODE(bRm))
1677 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1678
1679 switch (IEM_GET_MODRM_REG_8(bRm))
1680 {
1681 case 0:
1682 switch (IEM_GET_MODRM_RM_8(bRm))
1683 {
1684 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1685 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1686 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1687 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1688 }
1689 IEMOP_RAISE_INVALID_OPCODE_RET();
1690
1691 case 1:
1692 switch (IEM_GET_MODRM_RM_8(bRm))
1693 {
1694 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1695 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1696 }
1697 IEMOP_RAISE_INVALID_OPCODE_RET();
1698
1699 case 2:
1700 switch (IEM_GET_MODRM_RM_8(bRm))
1701 {
1702 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1703 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1704 }
1705 IEMOP_RAISE_INVALID_OPCODE_RET();
1706
1707 case 3:
1708 switch (IEM_GET_MODRM_RM_8(bRm))
1709 {
1710 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1711 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1712 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1713 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1714 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1715 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1716 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1717 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1719 }
1720
1721 case 4:
1722 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1723
1724 case 5:
1725 IEMOP_RAISE_INVALID_OPCODE_RET();
1726
1727 case 6:
1728 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1729
1730 case 7:
1731 switch (IEM_GET_MODRM_RM_8(bRm))
1732 {
1733 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1734 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1735 }
1736 IEMOP_RAISE_INVALID_OPCODE_RET();
1737
1738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1739 }
1740}
1741
1742FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1743{
1744 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1746
1747 if (IEM_IS_MODRM_REG_MODE(bRm))
1748 {
1749 switch (pVCpu->iem.s.enmEffOpSize)
1750 {
1751 case IEMMODE_16BIT:
1752 IEM_MC_BEGIN(0, 0);
1753 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1754 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1755 IEM_MC_ARG(uint16_t, u16Sel, 1);
1756 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1757
1758 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1759 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1760 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1761 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1762
1763 IEM_MC_END();
1764 break;
1765
1766 case IEMMODE_32BIT:
1767 case IEMMODE_64BIT:
1768 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1769 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1770 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1771 IEM_MC_ARG(uint16_t, u16Sel, 1);
1772 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1773
1774 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1775 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1776 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1777 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1778
1779 IEM_MC_END();
1780 break;
1781
1782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1783 }
1784 }
1785 else
1786 {
1787 switch (pVCpu->iem.s.enmEffOpSize)
1788 {
1789 case IEMMODE_16BIT:
1790 IEM_MC_BEGIN(0, 0);
1791 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1792 IEM_MC_ARG(uint16_t, u16Sel, 1);
1793 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1795
1796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1797 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1798
1799 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1800 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1801 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1802 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1803
1804 IEM_MC_END();
1805 break;
1806
1807 case IEMMODE_32BIT:
1808 case IEMMODE_64BIT:
1809 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1811 IEM_MC_ARG(uint16_t, u16Sel, 1);
1812 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1814
1815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1816 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1817/** @todo testcase: make sure it's a 16-bit read. */
1818
1819 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1820 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1821 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1822 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1823
1824 IEM_MC_END();
1825 break;
1826
1827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1828 }
1829 }
1830}
1831
1832
1833
1834/**
1835 * @opcode 0x02
1836 * @opflmodify zf
1837 */
1838FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1839{
1840 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1842}
1843
1844
1845/**
1846 * @opcode 0x03
1847 * @opflmodify zf
1848 */
1849FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1850{
1851 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1852 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1853}
1854
1855
1856/** Opcode 0x0f 0x05. */
1857FNIEMOP_DEF(iemOp_syscall)
1858{
1859 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1861 /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
1862 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1863 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1864 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
1865}
1866
1867
1868/** Opcode 0x0f 0x06. */
1869FNIEMOP_DEF(iemOp_clts)
1870{
1871 IEMOP_MNEMONIC(clts, "clts");
1872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1873 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1874}
1875
1876
1877/** Opcode 0x0f 0x07. */
1878FNIEMOP_DEF(iemOp_sysret)
1879{
1880 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1882 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1883 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1884 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1885}
1886
1887
1888/** Opcode 0x0f 0x08. */
1889FNIEMOP_DEF(iemOp_invd)
1890{
1891 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1892 IEMOP_HLP_MIN_486();
1893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1894 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1895}
1896
1897
1898/** Opcode 0x0f 0x09. */
1899FNIEMOP_DEF(iemOp_wbinvd)
1900{
1901 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1902 IEMOP_HLP_MIN_486();
1903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1904 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1905}
1906
1907
1908/** Opcode 0x0f 0x0b. */
1909FNIEMOP_DEF(iemOp_ud2)
1910{
1911 IEMOP_MNEMONIC(ud2, "ud2");
1912 IEMOP_RAISE_INVALID_OPCODE_RET();
1913}
1914
1915/** Opcode 0x0f 0x0d. */
1916FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1917{
1918 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1919 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1920 {
1921 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1922 IEMOP_RAISE_INVALID_OPCODE_RET();
1923 }
1924
1925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1926 if (IEM_IS_MODRM_REG_MODE(bRm))
1927 {
1928 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1929 IEMOP_RAISE_INVALID_OPCODE_RET();
1930 }
1931
1932 switch (IEM_GET_MODRM_REG_8(bRm))
1933 {
1934 case 2: /* Aliased to /0 for the time being. */
1935 case 4: /* Aliased to /0 for the time being. */
1936 case 5: /* Aliased to /0 for the time being. */
1937 case 6: /* Aliased to /0 for the time being. */
1938 case 7: /* Aliased to /0 for the time being. */
1939 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1940 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1941 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1943 }
1944
1945 IEM_MC_BEGIN(0, 0);
1946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1949 /* Currently a NOP. */
1950 IEM_MC_NOREF(GCPtrEffSrc);
1951 IEM_MC_ADVANCE_RIP_AND_FINISH();
1952 IEM_MC_END();
1953}
1954
1955
1956/** Opcode 0x0f 0x0e. */
1957FNIEMOP_DEF(iemOp_femms)
1958{
1959 IEMOP_MNEMONIC(femms, "femms");
1960
1961 IEM_MC_BEGIN(0, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1963 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1964 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1965 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1966 IEM_MC_FPU_FROM_MMX_MODE();
1967 IEM_MC_ADVANCE_RIP_AND_FINISH();
1968 IEM_MC_END();
1969}
1970
1971
1972/** Opcode 0x0f 0x0f. */
1973FNIEMOP_DEF(iemOp_3Dnow)
1974{
1975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1976 {
1977 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1978 IEMOP_RAISE_INVALID_OPCODE_RET();
1979 }
1980
1981#ifdef IEM_WITH_3DNOW
1982 /* This is pretty sparse, use switch instead of table. */
1983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1984 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1985#else
1986 IEMOP_BITCH_ABOUT_STUB();
1987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1988#endif
1989}
1990
1991
1992/**
1993 * @opcode 0x10
1994 * @oppfx none
1995 * @opcpuid sse
1996 * @opgroup og_sse_simdfp_datamove
1997 * @opxcpttype 4UA
1998 * @optest op1=1 op2=2 -> op1=2
1999 * @optest op1=0 op2=-22 -> op1=-22
2000 */
2001FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2002{
2003 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2005 if (IEM_IS_MODRM_REG_MODE(bRm))
2006 {
2007 /*
2008 * XMM128, XMM128.
2009 */
2010 IEM_MC_BEGIN(0, 0);
2011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2014 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2015 IEM_GET_MODRM_RM(pVCpu, bRm));
2016 IEM_MC_ADVANCE_RIP_AND_FINISH();
2017 IEM_MC_END();
2018 }
2019 else
2020 {
2021 /*
2022 * XMM128, [mem128].
2023 */
2024 IEM_MC_BEGIN(0, 0);
2025 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2027
2028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2032
2033 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2034 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2035
2036 IEM_MC_ADVANCE_RIP_AND_FINISH();
2037 IEM_MC_END();
2038 }
2039
2040}
2041
2042
2043/**
2044 * @opcode 0x10
2045 * @oppfx 0x66
2046 * @opcpuid sse2
2047 * @opgroup og_sse2_pcksclr_datamove
2048 * @opxcpttype 4UA
2049 * @optest op1=1 op2=2 -> op1=2
2050 * @optest op1=0 op2=-42 -> op1=-42
2051 */
2052FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2053{
2054 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if (IEM_IS_MODRM_REG_MODE(bRm))
2057 {
2058 /*
2059 * XMM128, XMM128.
2060 */
2061 IEM_MC_BEGIN(0, 0);
2062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2065 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2066 IEM_GET_MODRM_RM(pVCpu, bRm));
2067 IEM_MC_ADVANCE_RIP_AND_FINISH();
2068 IEM_MC_END();
2069 }
2070 else
2071 {
2072 /*
2073 * XMM128, [mem128].
2074 */
2075 IEM_MC_BEGIN(0, 0);
2076 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2081 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090}
2091
2092
2093/**
2094 * @opcode 0x10
2095 * @oppfx 0xf3
2096 * @opcpuid sse
2097 * @opgroup og_sse_simdfp_datamove
2098 * @opxcpttype 5
2099 * @optest op1=1 op2=2 -> op1=2
2100 * @optest op1=0 op2=-22 -> op1=-22
2101 */
2102FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2103{
2104 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2106 if (IEM_IS_MODRM_REG_MODE(bRm))
2107 {
2108 /*
2109 * XMM32, XMM32.
2110 */
2111 IEM_MC_BEGIN(0, 0);
2112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2113 IEM_MC_LOCAL(uint32_t, uSrc);
2114
2115 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2117 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2118 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2119
2120 IEM_MC_ADVANCE_RIP_AND_FINISH();
2121 IEM_MC_END();
2122 }
2123 else
2124 {
2125 /*
2126 * XMM128, [mem32].
2127 */
2128 IEM_MC_BEGIN(0, 0);
2129 IEM_MC_LOCAL(uint32_t, uSrc);
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2131
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2134 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2136
2137 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2138 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2139
2140 IEM_MC_ADVANCE_RIP_AND_FINISH();
2141 IEM_MC_END();
2142 }
2143}
2144
2145
2146/**
2147 * @opcode 0x10
2148 * @oppfx 0xf2
2149 * @opcpuid sse2
2150 * @opgroup og_sse2_pcksclr_datamove
2151 * @opxcpttype 5
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
2154 */
2155FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2156{
2157 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2159 if (IEM_IS_MODRM_REG_MODE(bRm))
2160 {
2161 /*
2162 * XMM64, XMM64.
2163 */
2164 IEM_MC_BEGIN(0, 0);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2166 IEM_MC_LOCAL(uint64_t, uSrc);
2167
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2170 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2171 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2172
2173 IEM_MC_ADVANCE_RIP_AND_FINISH();
2174 IEM_MC_END();
2175 }
2176 else
2177 {
2178 /*
2179 * XMM128, [mem64].
2180 */
2181 IEM_MC_BEGIN(0, 0);
2182 IEM_MC_LOCAL(uint64_t, uSrc);
2183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2184
2185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2188 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2189
2190 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2191 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2192
2193 IEM_MC_ADVANCE_RIP_AND_FINISH();
2194 IEM_MC_END();
2195 }
2196}
2197
2198
2199/**
2200 * @opcode 0x11
2201 * @oppfx none
2202 * @opcpuid sse
2203 * @opgroup og_sse_simdfp_datamove
2204 * @opxcpttype 4UA
2205 * @optest op1=1 op2=2 -> op1=2
2206 * @optest op1=0 op2=-42 -> op1=-42
2207 */
2208FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2209{
2210 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2212 if (IEM_IS_MODRM_REG_MODE(bRm))
2213 {
2214 /*
2215 * XMM128, XMM128.
2216 */
2217 IEM_MC_BEGIN(0, 0);
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2219 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2220 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2221 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2222 IEM_GET_MODRM_REG(pVCpu, bRm));
2223 IEM_MC_ADVANCE_RIP_AND_FINISH();
2224 IEM_MC_END();
2225 }
2226 else
2227 {
2228 /*
2229 * [mem128], XMM128.
2230 */
2231 IEM_MC_BEGIN(0, 0);
2232 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2234
2235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2239
2240 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2241 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2242
2243 IEM_MC_ADVANCE_RIP_AND_FINISH();
2244 IEM_MC_END();
2245 }
2246}
2247
2248
2249/**
2250 * @opcode 0x11
2251 * @oppfx 0x66
2252 * @opcpuid sse2
2253 * @opgroup og_sse2_pcksclr_datamove
2254 * @opxcpttype 4UA
2255 * @optest op1=1 op2=2 -> op1=2
2256 * @optest op1=0 op2=-42 -> op1=-42
2257 */
2258FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2259{
2260 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 if (IEM_IS_MODRM_REG_MODE(bRm))
2263 {
2264 /*
2265 * XMM128, XMM128.
2266 */
2267 IEM_MC_BEGIN(0, 0);
2268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2269 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2271 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2272 IEM_GET_MODRM_REG(pVCpu, bRm));
2273 IEM_MC_ADVANCE_RIP_AND_FINISH();
2274 IEM_MC_END();
2275 }
2276 else
2277 {
2278 /*
2279 * [mem128], XMM128.
2280 */
2281 IEM_MC_BEGIN(0, 0);
2282 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2284
2285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2287 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2288 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2289
2290 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2291 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2292
2293 IEM_MC_ADVANCE_RIP_AND_FINISH();
2294 IEM_MC_END();
2295 }
2296}
2297
2298
2299/**
2300 * @opcode 0x11
2301 * @oppfx 0xf3
2302 * @opcpuid sse
2303 * @opgroup og_sse_simdfp_datamove
2304 * @opxcpttype 5
2305 * @optest op1=1 op2=2 -> op1=2
2306 * @optest op1=0 op2=-22 -> op1=-22
2307 */
2308FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2309{
2310 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2312 if (IEM_IS_MODRM_REG_MODE(bRm))
2313 {
2314 /*
2315 * XMM32, XMM32.
2316 */
2317 IEM_MC_BEGIN(0, 0);
2318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2319 IEM_MC_LOCAL(uint32_t, uSrc);
2320
2321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2323 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2324 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2325
2326 IEM_MC_ADVANCE_RIP_AND_FINISH();
2327 IEM_MC_END();
2328 }
2329 else
2330 {
2331 /*
2332 * [mem32], XMM32.
2333 */
2334 IEM_MC_BEGIN(0, 0);
2335 IEM_MC_LOCAL(uint32_t, uSrc);
2336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2337
2338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2342
2343 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2344 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2345
2346 IEM_MC_ADVANCE_RIP_AND_FINISH();
2347 IEM_MC_END();
2348 }
2349}
2350
2351
2352/**
2353 * @opcode 0x11
2354 * @oppfx 0xf2
2355 * @opcpuid sse2
2356 * @opgroup og_sse2_pcksclr_datamove
2357 * @opxcpttype 5
2358 * @optest op1=1 op2=2 -> op1=2
2359 * @optest op1=0 op2=-42 -> op1=-42
2360 */
2361FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2362{
2363 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2365 if (IEM_IS_MODRM_REG_MODE(bRm))
2366 {
2367 /*
2368 * XMM64, XMM64.
2369 */
2370 IEM_MC_BEGIN(0, 0);
2371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2372 IEM_MC_LOCAL(uint64_t, uSrc);
2373
2374 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2376 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2377 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2378
2379 IEM_MC_ADVANCE_RIP_AND_FINISH();
2380 IEM_MC_END();
2381 }
2382 else
2383 {
2384 /*
2385 * [mem64], XMM64.
2386 */
2387 IEM_MC_BEGIN(0, 0);
2388 IEM_MC_LOCAL(uint64_t, uSrc);
2389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2390
2391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2393 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2394 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2395
2396 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2397 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2398
2399 IEM_MC_ADVANCE_RIP_AND_FINISH();
2400 IEM_MC_END();
2401 }
2402}
2403
2404
2405FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2406{
2407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2408 if (IEM_IS_MODRM_REG_MODE(bRm))
2409 {
2410 /**
2411 * @opcode 0x12
2412 * @opcodesub 11 mr/reg
2413 * @oppfx none
2414 * @opcpuid sse
2415 * @opgroup og_sse_simdfp_datamove
2416 * @opxcpttype 5
2417 * @optest op1=1 op2=2 -> op1=2
2418 * @optest op1=0 op2=-42 -> op1=-42
2419 */
2420 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2421
2422 IEM_MC_BEGIN(0, 0);
2423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2424 IEM_MC_LOCAL(uint64_t, uSrc);
2425
2426 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2428 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2429 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2430
2431 IEM_MC_ADVANCE_RIP_AND_FINISH();
2432 IEM_MC_END();
2433 }
2434 else
2435 {
2436 /**
2437 * @opdone
2438 * @opcode 0x12
2439 * @opcodesub !11 mr/reg
2440 * @oppfx none
2441 * @opcpuid sse
2442 * @opgroup og_sse_simdfp_datamove
2443 * @opxcpttype 5
2444 * @optest op1=1 op2=2 -> op1=2
2445 * @optest op1=0 op2=-42 -> op1=-42
2446 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2447 */
2448 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2449
2450 IEM_MC_BEGIN(0, 0);
2451 IEM_MC_LOCAL(uint64_t, uSrc);
2452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2453
2454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2456 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2457 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2458
2459 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2460 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2461
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465}
2466
2467
2468/**
2469 * @opcode 0x12
2470 * @opcodesub !11 mr/reg
2471 * @oppfx 0x66
2472 * @opcpuid sse2
2473 * @opgroup og_sse2_pcksclr_datamove
2474 * @opxcpttype 5
2475 * @optest op1=1 op2=2 -> op1=2
2476 * @optest op1=0 op2=-42 -> op1=-42
2477 */
2478FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2479{
2480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2481 if (IEM_IS_MODRM_MEM_MODE(bRm))
2482 {
2483 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2484
2485 IEM_MC_BEGIN(0, 0);
2486 IEM_MC_LOCAL(uint64_t, uSrc);
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2491 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2493
2494 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2495 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2496
2497 IEM_MC_ADVANCE_RIP_AND_FINISH();
2498 IEM_MC_END();
2499 }
2500
2501 /**
2502 * @opdone
2503 * @opmnemonic ud660f12m3
2504 * @opcode 0x12
2505 * @opcodesub 11 mr/reg
2506 * @oppfx 0x66
2507 * @opunused immediate
2508 * @opcpuid sse
2509 * @optest ->
2510 */
2511 else
2512 IEMOP_RAISE_INVALID_OPCODE_RET();
2513}
2514
2515
2516/**
2517 * @opcode 0x12
2518 * @oppfx 0xf3
2519 * @opcpuid sse3
2520 * @opgroup og_sse3_pcksclr_datamove
2521 * @opxcpttype 4
2522 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2523 * op1=0x00000002000000020000000100000001
2524 */
2525FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2526{
2527 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * XMM, XMM.
2533 */
2534 IEM_MC_BEGIN(0, 0);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2536 IEM_MC_LOCAL(RTUINT128U, uSrc);
2537
2538 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2539 IEM_MC_PREPARE_SSE_USAGE();
2540
2541 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2542 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2543 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2544 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2545 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2546
2547 IEM_MC_ADVANCE_RIP_AND_FINISH();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /*
2553 * XMM, [mem128].
2554 */
2555 IEM_MC_BEGIN(0, 0);
2556 IEM_MC_LOCAL(RTUINT128U, uSrc);
2557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2558
2559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2561 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2562 IEM_MC_PREPARE_SSE_USAGE();
2563
2564 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2565 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2566 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2567 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2568 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2569
2570 IEM_MC_ADVANCE_RIP_AND_FINISH();
2571 IEM_MC_END();
2572 }
2573}
2574
2575
2576/**
2577 * @opcode 0x12
2578 * @oppfx 0xf2
2579 * @opcpuid sse3
2580 * @opgroup og_sse3_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2583 * op1=0x22222222111111112222222211111111
2584 */
2585FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2586{
2587 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2589 if (IEM_IS_MODRM_REG_MODE(bRm))
2590 {
2591 /*
2592 * XMM128, XMM64.
2593 */
2594 IEM_MC_BEGIN(0, 0);
2595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2596 IEM_MC_LOCAL(uint64_t, uSrc);
2597
2598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2599 IEM_MC_PREPARE_SSE_USAGE();
2600
2601 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2603 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2604
2605 IEM_MC_ADVANCE_RIP_AND_FINISH();
2606 IEM_MC_END();
2607 }
2608 else
2609 {
2610 /*
2611 * XMM128, [mem64].
2612 */
2613 IEM_MC_BEGIN(0, 0);
2614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2615 IEM_MC_LOCAL(uint64_t, uSrc);
2616
2617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2619 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2620 IEM_MC_PREPARE_SSE_USAGE();
2621
2622 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2623 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2624 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2625
2626 IEM_MC_ADVANCE_RIP_AND_FINISH();
2627 IEM_MC_END();
2628 }
2629}
2630
2631
2632/**
2633 * @opcode 0x13
2634 * @opcodesub !11 mr/reg
2635 * @oppfx none
2636 * @opcpuid sse
2637 * @opgroup og_sse_simdfp_datamove
2638 * @opxcpttype 5
2639 * @optest op1=1 op2=2 -> op1=2
2640 * @optest op1=0 op2=-42 -> op1=-42
2641 */
2642FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 if (IEM_IS_MODRM_MEM_MODE(bRm))
2646 {
2647 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2648
2649 IEM_MC_BEGIN(0, 0);
2650 IEM_MC_LOCAL(uint64_t, uSrc);
2651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2656 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2657
2658 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2659 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2660
2661 IEM_MC_ADVANCE_RIP_AND_FINISH();
2662 IEM_MC_END();
2663 }
2664
2665 /**
2666 * @opdone
2667 * @opmnemonic ud0f13m3
2668 * @opcode 0x13
2669 * @opcodesub 11 mr/reg
2670 * @oppfx none
2671 * @opunused immediate
2672 * @opcpuid sse
2673 * @optest ->
2674 */
2675 else
2676 IEMOP_RAISE_INVALID_OPCODE_RET();
2677}
2678
2679
2680/**
2681 * @opcode 0x13
2682 * @opcodesub !11 mr/reg
2683 * @oppfx 0x66
2684 * @opcpuid sse2
2685 * @opgroup og_sse2_pcksclr_datamove
2686 * @opxcpttype 5
2687 * @optest op1=1 op2=2 -> op1=2
2688 * @optest op1=0 op2=-42 -> op1=-42
2689 */
2690FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2691{
2692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2693 if (IEM_IS_MODRM_MEM_MODE(bRm))
2694 {
2695 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2696
2697 IEM_MC_BEGIN(0, 0);
2698 IEM_MC_LOCAL(uint64_t, uSrc);
2699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2700
2701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2703 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2705
2706 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2707 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2708
2709 IEM_MC_ADVANCE_RIP_AND_FINISH();
2710 IEM_MC_END();
2711 }
2712
2713 /**
2714 * @opdone
2715 * @opmnemonic ud660f13m3
2716 * @opcode 0x13
2717 * @opcodesub 11 mr/reg
2718 * @oppfx 0x66
2719 * @opunused immediate
2720 * @opcpuid sse
2721 * @optest ->
2722 */
2723 else
2724 IEMOP_RAISE_INVALID_OPCODE_RET();
2725}
2726
2727
2728/**
2729 * @opmnemonic udf30f13
2730 * @opcode 0x13
2731 * @oppfx 0xf3
2732 * @opunused intel-modrm
2733 * @opcpuid sse
2734 * @optest ->
2735 * @opdone
2736 */
2737
2738/**
2739 * @opmnemonic udf20f13
2740 * @opcode 0x13
2741 * @oppfx 0xf2
2742 * @opunused intel-modrm
2743 * @opcpuid sse
2744 * @optest ->
2745 * @opdone
2746 */
2747
2748/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2749FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2750{
2751 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2752 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2753}
2754
2755
2756/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2757FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2758{
2759 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2760 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2761}
2762
2763
2764/**
2765 * @opdone
2766 * @opmnemonic udf30f14
2767 * @opcode 0x14
2768 * @oppfx 0xf3
2769 * @opunused intel-modrm
2770 * @opcpuid sse
2771 * @optest ->
2772 * @opdone
2773 */
2774
2775/**
2776 * @opmnemonic udf20f14
2777 * @opcode 0x14
2778 * @oppfx 0xf2
2779 * @opunused intel-modrm
2780 * @opcpuid sse
2781 * @optest ->
2782 * @opdone
2783 */
2784
2785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2786FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2787{
2788 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2789 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2790}
2791
2792
2793/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2794FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2795{
2796 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2797 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2798}
2799
2800
2801/* Opcode 0xf3 0x0f 0x15 - invalid */
2802/* Opcode 0xf2 0x0f 0x15 - invalid */
2803
2804/**
2805 * @opdone
2806 * @opmnemonic udf30f15
2807 * @opcode 0x15
2808 * @oppfx 0xf3
2809 * @opunused intel-modrm
2810 * @opcpuid sse
2811 * @optest ->
2812 * @opdone
2813 */
2814
2815/**
2816 * @opmnemonic udf20f15
2817 * @opcode 0x15
2818 * @oppfx 0xf2
2819 * @opunused intel-modrm
2820 * @opcpuid sse
2821 * @optest ->
2822 * @opdone
2823 */
2824
2825FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2826{
2827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2828 if (IEM_IS_MODRM_REG_MODE(bRm))
2829 {
2830 /**
2831 * @opcode 0x16
2832 * @opcodesub 11 mr/reg
2833 * @oppfx none
2834 * @opcpuid sse
2835 * @opgroup og_sse_simdfp_datamove
2836 * @opxcpttype 5
2837 * @optest op1=1 op2=2 -> op1=2
2838 * @optest op1=0 op2=-42 -> op1=-42
2839 */
2840 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2841
2842 IEM_MC_BEGIN(0, 0);
2843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2844 IEM_MC_LOCAL(uint64_t, uSrc);
2845
2846 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2848 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2849 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2850
2851 IEM_MC_ADVANCE_RIP_AND_FINISH();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /**
2857 * @opdone
2858 * @opcode 0x16
2859 * @opcodesub !11 mr/reg
2860 * @oppfx none
2861 * @opcpuid sse
2862 * @opgroup og_sse_simdfp_datamove
2863 * @opxcpttype 5
2864 * @optest op1=1 op2=2 -> op1=2
2865 * @optest op1=0 op2=-42 -> op1=-42
2866 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2867 */
2868 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2869
2870 IEM_MC_BEGIN(0, 0);
2871 IEM_MC_LOCAL(uint64_t, uSrc);
2872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2873
2874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2876 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2877 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2878
2879 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2880 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2881
2882 IEM_MC_ADVANCE_RIP_AND_FINISH();
2883 IEM_MC_END();
2884 }
2885}
2886
2887
2888/**
2889 * @opcode 0x16
2890 * @opcodesub !11 mr/reg
2891 * @oppfx 0x66
2892 * @opcpuid sse2
2893 * @opgroup og_sse2_pcksclr_datamove
2894 * @opxcpttype 5
2895 * @optest op1=1 op2=2 -> op1=2
2896 * @optest op1=0 op2=-42 -> op1=-42
2897 */
2898FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2899{
2900 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2901 if (IEM_IS_MODRM_MEM_MODE(bRm))
2902 {
2903 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2904
2905 IEM_MC_BEGIN(0, 0);
2906 IEM_MC_LOCAL(uint64_t, uSrc);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
2909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2911 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2912 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2913
2914 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2915 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2916
2917 IEM_MC_ADVANCE_RIP_AND_FINISH();
2918 IEM_MC_END();
2919 }
2920
2921 /**
2922 * @opdone
2923 * @opmnemonic ud660f16m3
2924 * @opcode 0x16
2925 * @opcodesub 11 mr/reg
2926 * @oppfx 0x66
2927 * @opunused immediate
2928 * @opcpuid sse
2929 * @optest ->
2930 */
2931 else
2932 IEMOP_RAISE_INVALID_OPCODE_RET();
2933}
2934
2935
2936/**
2937 * @opcode 0x16
2938 * @oppfx 0xf3
2939 * @opcpuid sse3
2940 * @opgroup og_sse3_pcksclr_datamove
2941 * @opxcpttype 4
2942 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2943 * op1=0x00000002000000020000000100000001
2944 */
2945FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2946{
2947 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2949 if (IEM_IS_MODRM_REG_MODE(bRm))
2950 {
2951 /*
2952 * XMM128, XMM128.
2953 */
2954 IEM_MC_BEGIN(0, 0);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2956 IEM_MC_LOCAL(RTUINT128U, uSrc);
2957
2958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2959 IEM_MC_PREPARE_SSE_USAGE();
2960
2961 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2962 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2963 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2964 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2965 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2966
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 IEM_MC_END();
2969 }
2970 else
2971 {
2972 /*
2973 * XMM128, [mem128].
2974 */
2975 IEM_MC_BEGIN(0, 0);
2976 IEM_MC_LOCAL(RTUINT128U, uSrc);
2977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2978
2979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2982 IEM_MC_PREPARE_SSE_USAGE();
2983
2984 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2985 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2986 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2987 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2988 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2989
2990 IEM_MC_ADVANCE_RIP_AND_FINISH();
2991 IEM_MC_END();
2992 }
2993}
2994
2995/**
2996 * @opdone
2997 * @opmnemonic udf30f16
2998 * @opcode 0x16
2999 * @oppfx 0xf2
3000 * @opunused intel-modrm
3001 * @opcpuid sse
3002 * @optest ->
3003 * @opdone
3004 */
3005
3006
3007/**
3008 * @opcode 0x17
3009 * @opcodesub !11 mr/reg
3010 * @oppfx none
3011 * @opcpuid sse
3012 * @opgroup og_sse_simdfp_datamove
3013 * @opxcpttype 5
3014 * @optest op1=1 op2=2 -> op1=2
3015 * @optest op1=0 op2=-42 -> op1=-42
3016 */
3017FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3018{
3019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3020 if (IEM_IS_MODRM_MEM_MODE(bRm))
3021 {
3022 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3023
3024 IEM_MC_BEGIN(0, 0);
3025 IEM_MC_LOCAL(uint64_t, uSrc);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3032
3033 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3034 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3035
3036 IEM_MC_ADVANCE_RIP_AND_FINISH();
3037 IEM_MC_END();
3038 }
3039
3040 /**
3041 * @opdone
3042 * @opmnemonic ud0f17m3
3043 * @opcode 0x17
3044 * @opcodesub 11 mr/reg
3045 * @oppfx none
3046 * @opunused immediate
3047 * @opcpuid sse
3048 * @optest ->
3049 */
3050 else
3051 IEMOP_RAISE_INVALID_OPCODE_RET();
3052}
3053
3054
3055/**
3056 * @opcode 0x17
3057 * @opcodesub !11 mr/reg
3058 * @oppfx 0x66
3059 * @opcpuid sse2
3060 * @opgroup og_sse2_pcksclr_datamove
3061 * @opxcpttype 5
3062 * @optest op1=1 op2=2 -> op1=2
3063 * @optest op1=0 op2=-42 -> op1=-42
3064 */
3065FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3066{
3067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3068 if (IEM_IS_MODRM_MEM_MODE(bRm))
3069 {
3070 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3071
3072 IEM_MC_BEGIN(0, 0);
3073 IEM_MC_LOCAL(uint64_t, uSrc);
3074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3075
3076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3078 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3079 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3080
3081 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3082 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3083
3084 IEM_MC_ADVANCE_RIP_AND_FINISH();
3085 IEM_MC_END();
3086 }
3087
3088 /**
3089 * @opdone
3090 * @opmnemonic ud660f17m3
3091 * @opcode 0x17
3092 * @opcodesub 11 mr/reg
3093 * @oppfx 0x66
3094 * @opunused immediate
3095 * @opcpuid sse
3096 * @optest ->
3097 */
3098 else
3099 IEMOP_RAISE_INVALID_OPCODE_RET();
3100}
3101
3102
3103/**
3104 * @opdone
3105 * @opmnemonic udf30f17
3106 * @opcode 0x17
3107 * @oppfx 0xf3
3108 * @opunused intel-modrm
3109 * @opcpuid sse
3110 * @optest ->
3111 * @opdone
3112 */
3113
3114/**
3115 * @opmnemonic udf20f17
3116 * @opcode 0x17
3117 * @oppfx 0xf2
3118 * @opunused intel-modrm
3119 * @opcpuid sse
3120 * @optest ->
3121 * @opdone
3122 */
3123
3124
3125/** Opcode 0x0f 0x18. */
3126FNIEMOP_DEF(iemOp_prefetch_Grp16)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if (IEM_IS_MODRM_MEM_MODE(bRm))
3130 {
3131 switch (IEM_GET_MODRM_REG_8(bRm))
3132 {
3133 case 4: /* Aliased to /0 for the time being according to AMD. */
3134 case 5: /* Aliased to /0 for the time being according to AMD. */
3135 case 6: /* Aliased to /0 for the time being according to AMD. */
3136 case 7: /* Aliased to /0 for the time being according to AMD. */
3137 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3138 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3139 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3140 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3142 }
3143
3144 IEM_MC_BEGIN(0, 0);
3145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3148 /* Currently a NOP. */
3149 IEM_MC_NOREF(GCPtrEffSrc);
3150 IEM_MC_ADVANCE_RIP_AND_FINISH();
3151 IEM_MC_END();
3152 }
3153 else
3154 IEMOP_RAISE_INVALID_OPCODE_RET();
3155}
3156
3157
3158/** Opcode 0x0f 0x19..0x1f. */
3159FNIEMOP_DEF(iemOp_nop_Ev)
3160{
3161 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3163 if (IEM_IS_MODRM_REG_MODE(bRm))
3164 {
3165 IEM_MC_BEGIN(0, 0);
3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3167 IEM_MC_ADVANCE_RIP_AND_FINISH();
3168 IEM_MC_END();
3169 }
3170 else
3171 {
3172 IEM_MC_BEGIN(0, 0);
3173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3176 /* Currently a NOP. */
3177 IEM_MC_NOREF(GCPtrEffSrc);
3178 IEM_MC_ADVANCE_RIP_AND_FINISH();
3179 IEM_MC_END();
3180 }
3181}
3182
3183
3184/** Opcode 0x0f 0x20. */
3185FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3186{
3187 /* mod is ignored, as are operand size overrides. */
3188/** @todo testcase: check memory encoding. */
3189 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3190 IEMOP_HLP_MIN_386();
3191 if (IEM_IS_64BIT_CODE(pVCpu))
3192 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3193 else
3194 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3195
3196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3197 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3198 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3199 {
3200 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3201 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3202 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3203 iCrReg |= 8;
3204 }
3205 switch (iCrReg)
3206 {
3207 case 0: case 2: case 3: case 4: case 8:
3208 break;
3209 default:
3210 IEMOP_RAISE_INVALID_OPCODE_RET();
3211 }
3212 IEMOP_HLP_DONE_DECODING();
3213
3214 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3215 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3216 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3217}
3218
3219
3220/** Opcode 0x0f 0x21. */
3221FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3222{
3223/** @todo testcase: check memory encoding. */
3224 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3225 IEMOP_HLP_MIN_386();
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3229 IEMOP_RAISE_INVALID_OPCODE_RET();
3230 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3231 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3232 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3233}
3234
3235
3236/** Opcode 0x0f 0x22. */
3237FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3238{
3239 /* mod is ignored, as are operand size overrides. */
3240 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3241 IEMOP_HLP_MIN_386();
3242 if (IEM_IS_64BIT_CODE(pVCpu))
3243 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3244 else
3245 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3246
3247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3248 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3249 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3250 {
3251 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3252 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3253 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3254 iCrReg |= 8;
3255 }
3256 switch (iCrReg)
3257 {
3258 case 0: case 2: case 3: case 4: case 8:
3259 break;
3260 default:
3261 IEMOP_RAISE_INVALID_OPCODE_RET();
3262 }
3263 IEMOP_HLP_DONE_DECODING();
3264
3265 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3266 if (iCrReg & (2 | 8))
3267 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3268 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3269 else
3270 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3271 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3272}
3273
3274
3275/** Opcode 0x0f 0x23. */
3276FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3277{
3278 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3279 IEMOP_HLP_MIN_386();
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3283 IEMOP_RAISE_INVALID_OPCODE_RET();
3284 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3285 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3286}
3287
3288
3289/** Opcode 0x0f 0x24. */
3290FNIEMOP_DEF(iemOp_mov_Rd_Td)
3291{
3292 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3293 IEMOP_HLP_MIN_386();
3294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3296 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3297 IEMOP_RAISE_INVALID_OPCODE_RET();
3298 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3299 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3300}
3301
3302
3303/** Opcode 0x0f 0x26. */
3304FNIEMOP_DEF(iemOp_mov_Td_Rd)
3305{
3306 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3307 IEMOP_HLP_MIN_386();
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3310 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3311 IEMOP_RAISE_INVALID_OPCODE_RET();
3312 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3313}
3314
3315
3316/**
3317 * @opcode 0x28
3318 * @oppfx none
3319 * @opcpuid sse
3320 * @opgroup og_sse_simdfp_datamove
3321 * @opxcpttype 1
3322 * @optest op1=1 op2=2 -> op1=2
3323 * @optest op1=0 op2=-42 -> op1=-42
3324 */
3325FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3326{
3327 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3329 if (IEM_IS_MODRM_REG_MODE(bRm))
3330 {
3331 /*
3332 * Register, register.
3333 */
3334 IEM_MC_BEGIN(0, 0);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3336 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3337 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3338 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3339 IEM_GET_MODRM_RM(pVCpu, bRm));
3340 IEM_MC_ADVANCE_RIP_AND_FINISH();
3341 IEM_MC_END();
3342 }
3343 else
3344 {
3345 /*
3346 * Register, memory.
3347 */
3348 IEM_MC_BEGIN(0, 0);
3349 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3351
3352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3354 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3355 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3356
3357 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3358 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3359
3360 IEM_MC_ADVANCE_RIP_AND_FINISH();
3361 IEM_MC_END();
3362 }
3363}
3364
3365/**
3366 * @opcode 0x28
3367 * @oppfx 66
3368 * @opcpuid sse2
3369 * @opgroup og_sse2_pcksclr_datamove
3370 * @opxcpttype 1
3371 * @optest op1=1 op2=2 -> op1=2
3372 * @optest op1=0 op2=-42 -> op1=-42
3373 */
3374FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3375{
3376 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3378 if (IEM_IS_MODRM_REG_MODE(bRm))
3379 {
3380 /*
3381 * Register, register.
3382 */
3383 IEM_MC_BEGIN(0, 0);
3384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3385 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3387 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3388 IEM_GET_MODRM_RM(pVCpu, bRm));
3389 IEM_MC_ADVANCE_RIP_AND_FINISH();
3390 IEM_MC_END();
3391 }
3392 else
3393 {
3394 /*
3395 * Register, memory.
3396 */
3397 IEM_MC_BEGIN(0, 0);
3398 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3400
3401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3403 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3404 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3405
3406 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3407 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3408
3409 IEM_MC_ADVANCE_RIP_AND_FINISH();
3410 IEM_MC_END();
3411 }
3412}
3413
3414/* Opcode 0xf3 0x0f 0x28 - invalid */
3415/* Opcode 0xf2 0x0f 0x28 - invalid */
3416
3417/**
3418 * @opcode 0x29
3419 * @oppfx none
3420 * @opcpuid sse
3421 * @opgroup og_sse_simdfp_datamove
3422 * @opxcpttype 1
3423 * @optest op1=1 op2=2 -> op1=2
3424 * @optest op1=0 op2=-42 -> op1=-42
3425 */
3426FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3427{
3428 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3430 if (IEM_IS_MODRM_REG_MODE(bRm))
3431 {
3432 /*
3433 * Register, register.
3434 */
3435 IEM_MC_BEGIN(0, 0);
3436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3439 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3440 IEM_GET_MODRM_REG(pVCpu, bRm));
3441 IEM_MC_ADVANCE_RIP_AND_FINISH();
3442 IEM_MC_END();
3443 }
3444 else
3445 {
3446 /*
3447 * Memory, register.
3448 */
3449 IEM_MC_BEGIN(0, 0);
3450 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3452
3453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3457
3458 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3459 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3460
3461 IEM_MC_ADVANCE_RIP_AND_FINISH();
3462 IEM_MC_END();
3463 }
3464}
3465
3466/**
3467 * @opcode 0x29
3468 * @oppfx 66
3469 * @opcpuid sse2
3470 * @opgroup og_sse2_pcksclr_datamove
3471 * @opxcpttype 1
3472 * @optest op1=1 op2=2 -> op1=2
3473 * @optest op1=0 op2=-42 -> op1=-42
3474 */
3475FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3476{
3477 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3479 if (IEM_IS_MODRM_REG_MODE(bRm))
3480 {
3481 /*
3482 * Register, register.
3483 */
3484 IEM_MC_BEGIN(0, 0);
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3486 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3488 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3489 IEM_GET_MODRM_REG(pVCpu, bRm));
3490 IEM_MC_ADVANCE_RIP_AND_FINISH();
3491 IEM_MC_END();
3492 }
3493 else
3494 {
3495 /*
3496 * Memory, register.
3497 */
3498 IEM_MC_BEGIN(0, 0);
3499 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3501
3502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3504 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3506
3507 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3508 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3509
3510 IEM_MC_ADVANCE_RIP_AND_FINISH();
3511 IEM_MC_END();
3512 }
3513}
3514
3515/* Opcode 0xf3 0x0f 0x29 - invalid */
3516/* Opcode 0xf2 0x0f 0x29 - invalid */
3517
3518
3519/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3520FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3521{
3522 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3524 if (IEM_IS_MODRM_REG_MODE(bRm))
3525 {
3526 /*
3527 * XMM, MMX
3528 */
3529 IEM_MC_BEGIN(0, 0);
3530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3531 IEM_MC_LOCAL(X86XMMREG, Dst);
3532 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3533 IEM_MC_ARG(uint64_t, u64Src, 1);
3534 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3535 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3536 IEM_MC_PREPARE_FPU_USAGE();
3537 IEM_MC_FPU_TO_MMX_MODE();
3538
3539 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3540 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3541
3542 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3544 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3545
3546 IEM_MC_ADVANCE_RIP_AND_FINISH();
3547 IEM_MC_END();
3548 }
3549 else
3550 {
3551 /*
3552 * XMM, [mem64]
3553 */
3554 IEM_MC_BEGIN(0, 0);
3555 IEM_MC_LOCAL(X86XMMREG, Dst);
3556 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3557 IEM_MC_ARG(uint64_t, u64Src, 1);
3558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3559
3560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3562 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3563 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3564 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3565
3566 IEM_MC_PREPARE_FPU_USAGE();
3567 IEM_MC_FPU_TO_MMX_MODE();
3568 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3569 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3570 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3571 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3572
3573 IEM_MC_ADVANCE_RIP_AND_FINISH();
3574 IEM_MC_END();
3575 }
3576}
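
/* Reference sketch (not IEM code, kept under #if 0) of the conversion the
 * iemAImpl_cvtpi2ps_u128 worker is assumed to perform per the SDM: the two
 * packed int32s become two single-precision floats in the low quadword while
 * the high quadword of the destination stays untouched.  MXCSR rounding
 * control and SIMD FP exceptions are ignored here; memcpy is assumed from
 * <string.h>. */
#if 0
static void cvtpi2psSketch(PX86XMMREG pDst, uint64_t u64Src)
{
    float rLo = (float)(int32_t)RT_LO_U32(u64Src);  /* low  dword -> 1st single */
    float rHi = (float)(int32_t)RT_HI_U32(u64Src);  /* high dword -> 2nd single */
    memcpy(&pDst->au32[0], &rLo, sizeof(rLo));
    memcpy(&pDst->au32[1], &rHi, sizeof(rHi));
    /* pDst->au64[1], the high quadword, is deliberately left as-is. */
}
#endif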
3577
3578
3579/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3580FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3581{
3582 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3584 if (IEM_IS_MODRM_REG_MODE(bRm))
3585 {
3586 /*
3587 * XMM, MMX
3588 */
3589 IEM_MC_BEGIN(0, 0);
3590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3591 IEM_MC_LOCAL(X86XMMREG, Dst);
3592 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3593 IEM_MC_ARG(uint64_t, u64Src, 1);
3594 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3595 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3596 IEM_MC_PREPARE_FPU_USAGE();
3597 IEM_MC_FPU_TO_MMX_MODE();
3598
3599 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3600
3601 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3602 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3603 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3604
3605 IEM_MC_ADVANCE_RIP_AND_FINISH();
3606 IEM_MC_END();
3607 }
3608 else
3609 {
3610 /*
3611 * XMM, [mem64]
3612 */
3613 IEM_MC_BEGIN(0, 0);
3614 IEM_MC_LOCAL(X86XMMREG, Dst);
3615 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3616 IEM_MC_ARG(uint64_t, u64Src, 1);
3617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3618
3619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3622 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3623 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3624
3625 /* Doesn't cause a transition to MMX mode. */
3626 IEM_MC_PREPARE_SSE_USAGE();
3627
3628 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3629 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3630 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635}
3636
3637
3638/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3639FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3640{
3641 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3642
3643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3644 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3645 {
3646 if (IEM_IS_MODRM_REG_MODE(bRm))
3647 {
3648 /* XMM, greg64 */
3649 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3650 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3651 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3652 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3653
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3656 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3657
3658 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3659 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3660 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3661 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3662
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /* XMM, [mem64] */
3669 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3671 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3672 IEM_MC_LOCAL(int64_t, i64Src);
3673 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3674 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3675
3676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3678 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3679 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3680
3681 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3682 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3683 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3684 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3685
3686 IEM_MC_ADVANCE_RIP_AND_FINISH();
3687 IEM_MC_END();
3688 }
3689 }
3690 else
3691 {
3692 if (IEM_IS_MODRM_REG_MODE(bRm))
3693 {
3694 /* XMM, greg32 */
3695 IEM_MC_BEGIN(0, 0);
3696 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3697 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3698 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3699
3700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3701 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3702 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3703
3704 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3705 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3706 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3707 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3708
3709 IEM_MC_ADVANCE_RIP_AND_FINISH();
3710 IEM_MC_END();
3711 }
3712 else
3713 {
3714 /* XMM, [mem32] */
3715 IEM_MC_BEGIN(0, 0);
3716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3717 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3718 IEM_MC_LOCAL(int32_t, i32Src);
3719 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3720 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3721
3722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3724 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3725 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3726
3727 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3728 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3729 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3730 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3731
3732 IEM_MC_ADVANCE_RIP_AND_FINISH();
3733 IEM_MC_END();
3734 }
3735 }
3736}
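
/* Illustrative guest encodings the routine above decodes (the modrm byte 0xc8,
 * i.e. xmm1/eax, is an arbitrary example):
 *     f3 0f 2a c8          cvtsi2ss xmm1, eax     ; 32-bit integer source
 *     f3 48 0f 2a c8       cvtsi2ss xmm1, rax     ; REX.W set: 64-bit source
 */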
3737
3738
3739/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3740FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3741{
3742 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3743
3744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3745 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3746 {
3747 if (IEM_IS_MODRM_REG_MODE(bRm))
3748 {
3749 /* XMM, greg64 */
3750 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3751 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3752 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3753 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3754
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3757 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3758
3759 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3760 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3761 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3762 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3763
3764 IEM_MC_ADVANCE_RIP_AND_FINISH();
3765 IEM_MC_END();
3766 }
3767 else
3768 {
3769 /* XMM, [mem64] */
3770 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3772 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3773 IEM_MC_LOCAL(int64_t, i64Src);
3774 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3775 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3776
3777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3779 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3780 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3781
3782 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3783 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3784 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3785 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3786
3787 IEM_MC_ADVANCE_RIP_AND_FINISH();
3788 IEM_MC_END();
3789 }
3790 }
3791 else
3792 {
3793 if (IEM_IS_MODRM_REG_MODE(bRm))
3794 {
3795 /* XMM, greg32 */
3796 IEM_MC_BEGIN(0, 0);
3797 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3798 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3799 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3800
3801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3802 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3803 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3804
3805 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3806 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3807 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3808 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3809
3810 IEM_MC_ADVANCE_RIP_AND_FINISH();
3811 IEM_MC_END();
3812 }
3813 else
3814 {
3815 /* XMM, [mem32] */
3816 IEM_MC_BEGIN(0, 0);
3817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3818 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3819 IEM_MC_LOCAL(int32_t, i32Src);
3820 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3821 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3822
3823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3825 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3826 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3827
3828 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3829 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3830 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3831 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3832
3833 IEM_MC_ADVANCE_RIP_AND_FINISH();
3834 IEM_MC_END();
3835 }
3836 }
3837}
3838
3839
3840/**
3841 * @opcode 0x2b
3842 * @opcodesub !11 mr/reg
3843 * @oppfx none
3844 * @opcpuid sse
3845 * @opgroup og_sse1_cachect
3846 * @opxcpttype 1
3847 * @optest op1=1 op2=2 -> op1=2
3848 * @optest op1=0 op2=-42 -> op1=-42
3849 */
3850FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3851{
3852 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3854 if (IEM_IS_MODRM_MEM_MODE(bRm))
3855 {
3856 /*
3857 * memory, register.
3858 */
3859 IEM_MC_BEGIN(0, 0);
3860 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3862
3863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3865 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3866 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3867
3868 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3869 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3870
3871 IEM_MC_ADVANCE_RIP_AND_FINISH();
3872 IEM_MC_END();
3873 }
3874 /* The register, register encoding is invalid. */
3875 else
3876 IEMOP_RAISE_INVALID_OPCODE_RET();
3877}
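
/* Note: movntps is a non-temporal store hint, so the destination must be
 * 16-byte aligned (hence IEM_MC_STORE_MEM_U128_ALIGN_SSE above) and the mod=11
 * register form raises #UD.  An illustrative encoding:
 *     0f 2b 00             movntps [rax], xmm0
 */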
3878
3879/**
3880 * @opcode 0x2b
3881 * @opcodesub !11 mr/reg
3882 * @oppfx 0x66
3883 * @opcpuid sse2
3884 * @opgroup og_sse2_cachect
3885 * @opxcpttype 1
3886 * @optest op1=1 op2=2 -> op1=2
3887 * @optest op1=0 op2=-42 -> op1=-42
3888 */
3889FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3890{
3891 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3893 if (IEM_IS_MODRM_MEM_MODE(bRm))
3894 {
3895 /*
3896 * memory, register.
3897 */
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3901
3902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3905 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3906
3907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3908 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3909
3910 IEM_MC_ADVANCE_RIP_AND_FINISH();
3911 IEM_MC_END();
3912 }
3913 /* The register, register encoding is invalid. */
3914 else
3915 IEMOP_RAISE_INVALID_OPCODE_RET();
3916}
3917/* Opcode 0xf3 0x0f 0x2b - invalid */
3918/* Opcode 0xf2 0x0f 0x2b - invalid */
3919
3920
3921/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3922FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3923{
3924 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3926 if (IEM_IS_MODRM_REG_MODE(bRm))
3927 {
3928 /*
3929 * Register, register.
3930 */
3931 IEM_MC_BEGIN(0, 0);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3933 IEM_MC_LOCAL(uint64_t, u64Dst);
3934 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3935 IEM_MC_ARG(uint64_t, u64Src, 1);
3936 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3937 IEM_MC_PREPARE_FPU_USAGE();
3938 IEM_MC_FPU_TO_MMX_MODE();
3939
3940 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3941
3942 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3943 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3944 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3945
3946 IEM_MC_ADVANCE_RIP_AND_FINISH();
3947 IEM_MC_END();
3948 }
3949 else
3950 {
3951 /*
3952 * Register, memory.
3953 */
3954 IEM_MC_BEGIN(0, 0);
3955 IEM_MC_LOCAL(uint64_t, u64Dst);
3956 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3957 IEM_MC_ARG(uint64_t, u64Src, 1);
3958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3959
3960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3962 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3963 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3964
3965 IEM_MC_PREPARE_FPU_USAGE();
3966 IEM_MC_FPU_TO_MMX_MODE();
3967
3968 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3969 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3970 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3971
3972 IEM_MC_ADVANCE_RIP_AND_FINISH();
3973 IEM_MC_END();
3974 }
3975}
3976
3977
3978/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3979FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3980{
3981 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3983 if (IEM_IS_MODRM_REG_MODE(bRm))
3984 {
3985 /*
3986 * Register, register.
3987 */
3988 IEM_MC_BEGIN(0, 0);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3990 IEM_MC_LOCAL(uint64_t, u64Dst);
3991 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3992 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
3993 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3994 IEM_MC_PREPARE_FPU_USAGE();
3995 IEM_MC_FPU_TO_MMX_MODE();
3996
3997 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3998
3999 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4000 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4001 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4002
4003 IEM_MC_ADVANCE_RIP_AND_FINISH();
4004 IEM_MC_END();
4005 }
4006 else
4007 {
4008 /*
4009 * Register, memory.
4010 */
4011 IEM_MC_BEGIN(0, 0);
4012 IEM_MC_LOCAL(uint64_t, u64Dst);
4013 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4014 IEM_MC_LOCAL(X86XMMREG, uSrc);
4015 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4017
4018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4020 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4021 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4022
4023 IEM_MC_PREPARE_FPU_USAGE();
4024 IEM_MC_FPU_TO_MMX_MODE();
4025
4026 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4027 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4028 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4029
4030 IEM_MC_ADVANCE_RIP_AND_FINISH();
4031 IEM_MC_END();
4032 }
4033}
4034
4035
4036/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4037FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4038{
4039 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4040
4041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4042 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4043 {
4044 if (IEM_IS_MODRM_REG_MODE(bRm))
4045 {
4046 /* greg64, XMM */
4047 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4048 IEM_MC_LOCAL(int64_t, i64Dst);
4049 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4050 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4051
4052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4053 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4054 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4055
4056 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4057 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4058 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4059 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4060
4061 IEM_MC_ADVANCE_RIP_AND_FINISH();
4062 IEM_MC_END();
4063 }
4064 else
4065 {
4066 /* greg64, [mem32] */
4067 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4069 IEM_MC_LOCAL(int64_t, i64Dst);
4070 IEM_MC_LOCAL(uint32_t, u32Src);
4071 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4072 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4073
4074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4076 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4077 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4078
4079 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4080 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4081 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4082 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4083
4084 IEM_MC_ADVANCE_RIP_AND_FINISH();
4085 IEM_MC_END();
4086 }
4087 }
4088 else
4089 {
4090 if (IEM_IS_MODRM_REG_MODE(bRm))
4091 {
4092 /* greg32, XMM */
4093 IEM_MC_BEGIN(0, 0);
4094 IEM_MC_LOCAL(int32_t, i32Dst);
4095 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4096 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4097
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4099 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4100 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4101
4102 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4103 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4104 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4105 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4106
4107 IEM_MC_ADVANCE_RIP_AND_FINISH();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112 /* greg32, [mem32] */
4113 IEM_MC_BEGIN(0, 0);
4114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4115 IEM_MC_LOCAL(int32_t, i32Dst);
4116 IEM_MC_LOCAL(uint32_t, u32Src);
4117 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4118 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4119
4120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4123 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4124
4125 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4126 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4127 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4128 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4129
4130 IEM_MC_ADVANCE_RIP_AND_FINISH();
4131 IEM_MC_END();
4132 }
4133 }
4134}
4135
4136
4137/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4138FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4139{
4140 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4141
4142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4143 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4144 {
4145 if (IEM_IS_MODRM_REG_MODE(bRm))
4146 {
4147 /* greg64, XMM */
4148 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4149 IEM_MC_LOCAL(int64_t, i64Dst);
4150 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4151 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4152
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4154 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4155 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4156
4157 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4158 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4159 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4160 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4161
4162 IEM_MC_ADVANCE_RIP_AND_FINISH();
4163 IEM_MC_END();
4164 }
4165 else
4166 {
4167 /* greg64, [mem64] */
4168 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4170 IEM_MC_LOCAL(int64_t, i64Dst);
4171 IEM_MC_LOCAL(uint64_t, u64Src);
4172 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4173 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4174
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4178 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4179
4180 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4181 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4182 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4183 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4184
4185 IEM_MC_ADVANCE_RIP_AND_FINISH();
4186 IEM_MC_END();
4187 }
4188 }
4189 else
4190 {
4191 if (IEM_IS_MODRM_REG_MODE(bRm))
4192 {
4193 /* greg32, XMM */
4194 IEM_MC_BEGIN(0, 0);
4195 IEM_MC_LOCAL(int32_t, i32Dst);
4196 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4197 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4198
4199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4200 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4201 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4202
4203 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4204 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4206 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4207
4208 IEM_MC_ADVANCE_RIP_AND_FINISH();
4209 IEM_MC_END();
4210 }
4211 else
4212 {
4213 /* greg32, [mem64] */
4214 IEM_MC_BEGIN(0, 0);
4215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4216 IEM_MC_LOCAL(int32_t, i32Dst);
4217 IEM_MC_LOCAL(uint64_t, u64Src);
4218 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4219 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4220
4221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4224 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4225
4226 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4227 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4228 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4229 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4230
4231 IEM_MC_ADVANCE_RIP_AND_FINISH();
4232 IEM_MC_END();
4233 }
4234 }
4235}
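
/* Worked example of the truncating conversion above versus the MXCSR-rounded
 * cvtsd2si further down: for a source value of -1.5, cvttsd2si always yields
 * -1 (chop toward zero), while cvtsd2si yields -2 under the default
 * round-to-nearest-even mode. */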
4236
4237
4238/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4239FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4240{
4241 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4243 if (IEM_IS_MODRM_REG_MODE(bRm))
4244 {
4245 /*
4246 * Register, register.
4247 */
4248 IEM_MC_BEGIN(0, 0);
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4250 IEM_MC_LOCAL(uint64_t, u64Dst);
4251 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4252 IEM_MC_ARG(uint64_t, u64Src, 1);
4253
4254 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4255 IEM_MC_PREPARE_FPU_USAGE();
4256 IEM_MC_FPU_TO_MMX_MODE();
4257
4258 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4259
4260 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4261 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4262 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4263
4264 IEM_MC_ADVANCE_RIP_AND_FINISH();
4265 IEM_MC_END();
4266 }
4267 else
4268 {
4269 /*
4270 * Register, memory.
4271 */
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_LOCAL(uint64_t, u64Dst);
4274 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4275 IEM_MC_ARG(uint64_t, u64Src, 1);
4276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4277
4278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4281 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4282
4283 IEM_MC_PREPARE_FPU_USAGE();
4284 IEM_MC_FPU_TO_MMX_MODE();
4285
4286 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4287 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4288 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4289
4290 IEM_MC_ADVANCE_RIP_AND_FINISH();
4291 IEM_MC_END();
4292 }
4293}
4294
4295
4296/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4297FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4298{
4299 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4301 if (IEM_IS_MODRM_REG_MODE(bRm))
4302 {
4303 /*
4304 * Register, register.
4305 */
4306 IEM_MC_BEGIN(0, 0);
4307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4308 IEM_MC_LOCAL(uint64_t, u64Dst);
4309 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4310 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4311
4312 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4313 IEM_MC_PREPARE_FPU_USAGE();
4314 IEM_MC_FPU_TO_MMX_MODE();
4315
4316 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4317
4318 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4319 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4320 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4321
4322 IEM_MC_ADVANCE_RIP_AND_FINISH();
4323 IEM_MC_END();
4324 }
4325 else
4326 {
4327 /*
4328 * Register, memory.
4329 */
4330 IEM_MC_BEGIN(0, 0);
4331 IEM_MC_LOCAL(uint64_t, u64Dst);
4332 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4333 IEM_MC_LOCAL(X86XMMREG, uSrc);
4334 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4336
4337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4340 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4341
4342 IEM_MC_PREPARE_FPU_USAGE();
4343 IEM_MC_FPU_TO_MMX_MODE();
4344
4345 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4346 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4347 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4348
4349 IEM_MC_ADVANCE_RIP_AND_FINISH();
4350 IEM_MC_END();
4351 }
4352}
4353
4354
4355/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4356FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4357{
4358 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4359
4360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4361 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4362 {
4363 if (IEM_IS_MODRM_REG_MODE(bRm))
4364 {
4365 /* greg64, XMM */
4366 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4367 IEM_MC_LOCAL(int64_t, i64Dst);
4368 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4369 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4370
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4373 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4374
4375 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4376 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4377 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4378 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4379
4380 IEM_MC_ADVANCE_RIP_AND_FINISH();
4381 IEM_MC_END();
4382 }
4383 else
4384 {
4385 /* greg64, [mem32] */
4386 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4388 IEM_MC_LOCAL(int64_t, i64Dst);
4389 IEM_MC_LOCAL(uint32_t, u32Src);
4390 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4391 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4392
4393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4395 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4396 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4397
4398 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4399 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4400 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4401 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4402
4403 IEM_MC_ADVANCE_RIP_AND_FINISH();
4404 IEM_MC_END();
4405 }
4406 }
4407 else
4408 {
4409 if (IEM_IS_MODRM_REG_MODE(bRm))
4410 {
4411 /* greg32, XMM */
4412 IEM_MC_BEGIN(0, 0);
4413 IEM_MC_LOCAL(int32_t, i32Dst);
4414 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4415 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4416
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4419 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4420
4421 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4422 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4423 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4424 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4425
4426 IEM_MC_ADVANCE_RIP_AND_FINISH();
4427 IEM_MC_END();
4428 }
4429 else
4430 {
4431 /* greg32, [mem32] */
4432 IEM_MC_BEGIN(0, 0);
4433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4434 IEM_MC_LOCAL(int32_t, i32Dst);
4435 IEM_MC_LOCAL(uint32_t, u32Src);
4436 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4437 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4438
4439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4442 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4443
4444 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4445 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4446 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4447 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4448
4449 IEM_MC_ADVANCE_RIP_AND_FINISH();
4450 IEM_MC_END();
4451 }
4452 }
4453}
4454
4455
4456/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4457FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4458{
4459 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4460
4461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4462 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4463 {
4464 if (IEM_IS_MODRM_REG_MODE(bRm))
4465 {
4466 /* greg64, XMM */
4467 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4468 IEM_MC_LOCAL(int64_t, i64Dst);
4469 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4470 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4471
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4473 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4474 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4475
4476 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4477 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4478 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4479 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4480
4481 IEM_MC_ADVANCE_RIP_AND_FINISH();
4482 IEM_MC_END();
4483 }
4484 else
4485 {
4486 /* greg64, [mem64] */
4487 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4489 IEM_MC_LOCAL(int64_t, i64Dst);
4490 IEM_MC_LOCAL(uint64_t, u64Src);
4491 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4492 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4493
4494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4496 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4497 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4498
4499 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4500 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4501 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4502 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4503
4504 IEM_MC_ADVANCE_RIP_AND_FINISH();
4505 IEM_MC_END();
4506 }
4507 }
4508 else
4509 {
4510 if (IEM_IS_MODRM_REG_MODE(bRm))
4511 {
4512 /* greg32, XMM */
4513 IEM_MC_BEGIN(0, 0);
4514 IEM_MC_LOCAL(int32_t, i32Dst);
4515 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4516 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4517
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4519 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4520 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4521
4522 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4523 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4524 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4525 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4526
4527 IEM_MC_ADVANCE_RIP_AND_FINISH();
4528 IEM_MC_END();
4529 }
4530 else
4531 {
4532 /* greg32, [mem64] */
4533 IEM_MC_BEGIN(0, 0);
4534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4535 IEM_MC_LOCAL(int32_t, i32Dst);
4536 IEM_MC_LOCAL(uint64_t, u64Src);
4537 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4538 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4539
4540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4542 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4543 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4544
4545 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4546 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4547 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4548 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4549
4550 IEM_MC_ADVANCE_RIP_AND_FINISH();
4551 IEM_MC_END();
4552 }
4553 }
4554}
4555
4556
4557/**
4558 * @opcode 0x2e
4559 * @oppfx none
4560 * @opflmodify cf,pf,af,zf,sf,of
4561 * @opflclear af,sf,of
4562 */
4563FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4564{
4565 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4567 if (IEM_IS_MODRM_REG_MODE(bRm))
4568 {
4569 /*
4570 * Register, register.
4571 */
4572 IEM_MC_BEGIN(0, 0);
4573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4574 IEM_MC_LOCAL(uint32_t, fEFlags);
4575 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4576 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4577 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4578 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4579 IEM_MC_PREPARE_SSE_USAGE();
4580 IEM_MC_FETCH_EFLAGS(fEFlags);
4581 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4582 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4583 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4584 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4585 IEM_MC_COMMIT_EFLAGS(fEFlags);
4586
4587 IEM_MC_ADVANCE_RIP_AND_FINISH();
4588 IEM_MC_END();
4589 }
4590 else
4591 {
4592 /*
4593 * Register, memory.
4594 */
4595 IEM_MC_BEGIN(0, 0);
4596 IEM_MC_LOCAL(uint32_t, fEFlags);
4597 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4598 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4599 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4601
4602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4604 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4605 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4606
4607 IEM_MC_PREPARE_SSE_USAGE();
4608 IEM_MC_FETCH_EFLAGS(fEFlags);
4609 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4610 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4611 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4612 IEM_MC_COMMIT_EFLAGS(fEFlags);
4613
4614 IEM_MC_ADVANCE_RIP_AND_FINISH();
4615 IEM_MC_END();
4616 }
4617}
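
/* Reference sketch (not IEM code) of the EFLAGS result the
 * iemAImpl_ucomiss_u128 worker is assumed to produce per the SDM; plain C
 * floats and isnan() from <math.h> stand in for RTFLOAT32U and the QNaN/SNaN
 * distinction (ucomiss only raises #IA for SNaN operands): */
#if 0
static uint32_t ucomissEflSketch(float r32Src1, float r32Src2)
{
    if (isnan(r32Src1) || isnan(r32Src2))
        return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;    /* unordered */
    if (r32Src1 == r32Src2)
        return X86_EFL_ZF;                              /* equal */
    if (r32Src1 < r32Src2)
        return X86_EFL_CF;                              /* less than */
    return 0;                       /* greater than; AF/SF/OF always cleared */
}
#endif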
4618
4619
4620/**
4621 * @opcode 0x2e
4622 * @oppfx 0x66
4623 * @opflmodify cf,pf,af,zf,sf,of
4624 * @opflclear af,sf,of
4625 */
4626FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4627{
4628 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4630 if (IEM_IS_MODRM_REG_MODE(bRm))
4631 {
4632 /*
4633 * Register, register.
4634 */
4635 IEM_MC_BEGIN(0, 0);
4636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4637 IEM_MC_LOCAL(uint32_t, fEFlags);
4638 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4639 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4640 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4641 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4642 IEM_MC_PREPARE_SSE_USAGE();
4643 IEM_MC_FETCH_EFLAGS(fEFlags);
4644 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4645 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4646 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4647 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4648 IEM_MC_COMMIT_EFLAGS(fEFlags);
4649
4650 IEM_MC_ADVANCE_RIP_AND_FINISH();
4651 IEM_MC_END();
4652 }
4653 else
4654 {
4655 /*
4656 * Register, memory.
4657 */
4658 IEM_MC_BEGIN(0, 0);
4659 IEM_MC_LOCAL(uint32_t, fEFlags);
4660 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4661 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4662 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4664
4665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4668 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4669
4670 IEM_MC_PREPARE_SSE_USAGE();
4671 IEM_MC_FETCH_EFLAGS(fEFlags);
4672 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4673 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4674 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4675 IEM_MC_COMMIT_EFLAGS(fEFlags);
4676
4677 IEM_MC_ADVANCE_RIP_AND_FINISH();
4678 IEM_MC_END();
4679 }
4680}
4681
4682
4683/* Opcode 0xf3 0x0f 0x2e - invalid */
4684/* Opcode 0xf2 0x0f 0x2e - invalid */
4685
4686
4687/**
4688 * @opcode 0x2f
4689 * @oppfx none
4690 * @opflmodify cf,pf,af,zf,sf,of
4691 * @opflclear af,sf,of
4692 */
4693FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4694{
4695 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4697 if (IEM_IS_MODRM_REG_MODE(bRm))
4698 {
4699 /*
4700 * Register, register.
4701 */
4702 IEM_MC_BEGIN(0, 0);
4703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4704 IEM_MC_LOCAL(uint32_t, fEFlags);
4705 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4706 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4707 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4708 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4709 IEM_MC_PREPARE_SSE_USAGE();
4710 IEM_MC_FETCH_EFLAGS(fEFlags);
4711 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4712 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4713 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4714 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4715 IEM_MC_COMMIT_EFLAGS(fEFlags);
4716
4717 IEM_MC_ADVANCE_RIP_AND_FINISH();
4718 IEM_MC_END();
4719 }
4720 else
4721 {
4722 /*
4723 * Register, memory.
4724 */
4725 IEM_MC_BEGIN(0, 0);
4726 IEM_MC_LOCAL(uint32_t, fEFlags);
4727 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4728 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4729 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4731
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4734 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4735 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4736
4737 IEM_MC_PREPARE_SSE_USAGE();
4738 IEM_MC_FETCH_EFLAGS(fEFlags);
4739 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4740 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4741 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4742 IEM_MC_COMMIT_EFLAGS(fEFlags);
4743
4744 IEM_MC_ADVANCE_RIP_AND_FINISH();
4745 IEM_MC_END();
4746 }
4747}
4748
4749
4750/**
4751 * @opcode 0x2f
4752 * @oppfx 0x66
4753 * @opflmodify cf,pf,af,zf,sf,of
4754 * @opflclear af,sf,of
4755 */
4756FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4757{
4758 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4760 if (IEM_IS_MODRM_REG_MODE(bRm))
4761 {
4762 /*
4763 * Register, register.
4764 */
4765 IEM_MC_BEGIN(0, 0);
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4767 IEM_MC_LOCAL(uint32_t, fEFlags);
4768 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4769 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4770 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4771 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4772 IEM_MC_PREPARE_SSE_USAGE();
4773 IEM_MC_FETCH_EFLAGS(fEFlags);
4774 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4775 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4776 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4777 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4778 IEM_MC_COMMIT_EFLAGS(fEFlags);
4779
4780 IEM_MC_ADVANCE_RIP_AND_FINISH();
4781 IEM_MC_END();
4782 }
4783 else
4784 {
4785 /*
4786 * Register, memory.
4787 */
4788 IEM_MC_BEGIN(0, 0);
4789 IEM_MC_LOCAL(uint32_t, fEFlags);
4790 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4791 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4792 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4794
4795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4797 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4798 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4799
4800 IEM_MC_PREPARE_SSE_USAGE();
4801 IEM_MC_FETCH_EFLAGS(fEFlags);
4802 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4803 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4804 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4805 IEM_MC_COMMIT_EFLAGS(fEFlags);
4806
4807 IEM_MC_ADVANCE_RIP_AND_FINISH();
4808 IEM_MC_END();
4809 }
4810}
4811
4812
4813/* Opcode 0xf3 0x0f 0x2f - invalid */
4814/* Opcode 0xf2 0x0f 0x2f - invalid */
4815
4816/** Opcode 0x0f 0x30. */
4817FNIEMOP_DEF(iemOp_wrmsr)
4818{
4819 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4821 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4822}
4823
4824
4825/** Opcode 0x0f 0x31. */
4826FNIEMOP_DEF(iemOp_rdtsc)
4827{
4828 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4833 iemCImpl_rdtsc);
4834}
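
/* Note: rdtsc returns the TSC split across EDX:EAX (high:low), which is why
 * the deferral above lists both xAX and xDX as guest registers the C
 * implementation may write. */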
4835
4836
4837/** Opcode 0x0f 0x32. */
4838FNIEMOP_DEF(iemOp_rdmsr)
4839{
4840 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4842 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4843 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4845 iemCImpl_rdmsr);
4846}
4847
4848
4849/** Opcode 0x0f 0x33. */
4850FNIEMOP_DEF(iemOp_rdpmc)
4851{
4852 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4855 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4857 iemCImpl_rdpmc);
4858}
4859
4860
4861/** Opcode 0x0f 0x34. */
4862FNIEMOP_DEF(iemOp_sysenter)
4863{
4864 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4867 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4868 iemCImpl_sysenter);
4869}
4870
4871/** Opcode 0x0f 0x35. */
4872FNIEMOP_DEF(iemOp_sysexit)
4873{
4874 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4876 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4877 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4878 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4879}
4880
4881/** Opcode 0x0f 0x37. */
4882FNIEMOP_STUB(iemOp_getsec);
4883
4884
4885/** Opcode 0x0f 0x38. */
4886FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4887{
4888#ifdef IEM_WITH_THREE_0F_38
4889 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4890 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4891#else
4892 IEMOP_BITCH_ABOUT_STUB();
4893 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4894#endif
4895}
4896
4897
4898/** Opcode 0x0f 0x3a. */
4899FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4900{
4901#ifdef IEM_WITH_THREE_0F_3A
4902 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4903 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4904#else
4905 IEMOP_BITCH_ABOUT_STUB();
4906 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4907#endif
4908}
4909
4910
4911/**
4912 * Implements a conditional move.
4913 *
4914 * Wish there was an obvious way to do this where we could share and reduce
4915 * code bloat.
4916 *
4917 * @param a_Cnd The conditional "microcode" operation.
4918 */
4919#define CMOV_X(a_Cnd) \
4920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4921 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4922 { \
4923 switch (pVCpu->iem.s.enmEffOpSize) \
4924 { \
4925 case IEMMODE_16BIT: \
4926 IEM_MC_BEGIN(0, 0); \
4927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4928 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4929 a_Cnd { \
4930 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4931 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4932 } IEM_MC_ENDIF(); \
4933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4934 IEM_MC_END(); \
4935 break; \
4936 \
4937 case IEMMODE_32BIT: \
4938 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4940 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4941 a_Cnd { \
4942 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4943 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4944 } IEM_MC_ELSE() { \
4945 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4946 } IEM_MC_ENDIF(); \
4947 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4948 IEM_MC_END(); \
4949 break; \
4950 \
4951 case IEMMODE_64BIT: \
4952 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4954 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4955 a_Cnd { \
4956 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4957 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4958 } IEM_MC_ENDIF(); \
4959 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4960 IEM_MC_END(); \
4961 break; \
4962 \
4963 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4964 } \
4965 } \
4966 else \
4967 { \
4968 switch (pVCpu->iem.s.enmEffOpSize) \
4969 { \
4970 case IEMMODE_16BIT: \
4971 IEM_MC_BEGIN(0, 0); \
4972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4973 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4976 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4977 a_Cnd { \
4978 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4979 } IEM_MC_ENDIF(); \
4980 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4981 IEM_MC_END(); \
4982 break; \
4983 \
4984 case IEMMODE_32BIT: \
4985 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4987 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4990 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4991 a_Cnd { \
4992 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4993 } IEM_MC_ELSE() { \
4994 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4995 } IEM_MC_ENDIF(); \
4996 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4997 IEM_MC_END(); \
4998 break; \
4999 \
5000 case IEMMODE_64BIT: \
5001 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5003 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5006 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5007 a_Cnd { \
5008 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5009 } IEM_MC_ENDIF(); \
5010 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5011 IEM_MC_END(); \
5012 break; \
5013 \
5014 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5015 } \
5016 } do {} while (0)
5017
5018
5019
5020/**
5021 * @opcode 0x40
5022 * @opfltest of
5023 */
5024FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5025{
5026 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5027 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5028}
5029
5030
5031/**
5032 * @opcode 0x41
5033 * @opfltest of
5034 */
5035FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5036{
5037 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5038 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5039}
5040
5041
5042/**
5043 * @opcode 0x42
5044 * @opfltest cf
5045 */
5046FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5047{
5048 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5049 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5050}
5051
5052
5053/**
5054 * @opcode 0x43
5055 * @opfltest cf
5056 */
5057FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5058{
5059 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5060 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5061}
5062
5063
5064/**
5065 * @opcode 0x44
5066 * @opfltest zf
5067 */
5068FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5069{
5070 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5071 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5072}
5073
5074
5075/**
5076 * @opcode 0x45
5077 * @opfltest zf
5078 */
5079FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5080{
5081 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5082 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5083}
5084
5085
5086/**
5087 * @opcode 0x46
5088 * @opfltest cf,zf
5089 */
5090FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5091{
5092 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5093 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5094}
5095
5096
5097/**
5098 * @opcode 0x47
5099 * @opfltest cf,zf
5100 */
5101FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5102{
5103 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5104 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5105}
5106
5107
5108/**
5109 * @opcode 0x48
5110 * @opfltest sf
5111 */
5112FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5113{
5114 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5115 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5116}
5117
5118
5119/**
5120 * @opcode 0x49
5121 * @opfltest sf
5122 */
5123FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5124{
5125 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5126 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5127}
5128
5129
5130/**
5131 * @opcode 0x4a
5132 * @opfltest pf
5133 */
5134FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5135{
5136 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5137 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5138}
5139
5140
5141/**
5142 * @opcode 0x4b
5143 * @opfltest pf
5144 */
5145FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5146{
5147 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5148 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5149}
5150
5151
5152/**
5153 * @opcode 0x4c
5154 * @opfltest sf,of
5155 */
5156FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5157{
5158 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5159 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5160}
5161
5162
5163/**
5164 * @opcode 0x4d
5165 * @opfltest sf,of
5166 */
5167FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5168{
5169 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5170 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5171}
5172
5173
5174/**
5175 * @opcode 0x4e
5176 * @opfltest zf,sf,of
5177 */
5178FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5179{
5180 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5181 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5182}
5183
5184
5185/**
5186 * @opcode 0x4f
5187 * @opfltest zf,sf,of
5188 */
5189FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5190{
5191 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5192 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5193}
5194
5195#undef CMOV_X
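
/* Plain-C sketch (illustrative, not IEM code) of the subtlety the 32-bit cases
 * of CMOV_X encode: in 64-bit mode a 32-bit cmov writes the destination even
 * when the condition is false, zero-extending into bits 63:32 - hence the
 * IEM_MC_CLEAR_HIGH_GREG_U64 in the else branches above: */
#if 0
static uint64_t cmovne32Sketch(uint64_t uDstOld, uint32_t uSrc, uint32_t fEfl)
{
    if (!(fEfl & X86_EFL_ZF))
        return uSrc;               /* condition true: move and zero-extend */
    return (uint32_t)uDstOld;      /* condition false: still clears bits 63:32 */
}
#endif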
5196
5197/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5198FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5199{
5200 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5202 if (IEM_IS_MODRM_REG_MODE(bRm))
5203 {
5204 /*
5205 * Register, register.
5206 */
5207 IEM_MC_BEGIN(0, 0);
5208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5209 IEM_MC_LOCAL(uint8_t, u8Dst);
5210 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5211 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5212 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5213 IEM_MC_PREPARE_SSE_USAGE();
5214 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5215 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5216 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5217 IEM_MC_ADVANCE_RIP_AND_FINISH();
5218 IEM_MC_END();
5219 }
5220 /* No memory operand. */
5221 else
5222 IEMOP_RAISE_INVALID_OPCODE_RET();
5223}
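
/* Reference sketch (not IEM code) of what the iemAImpl_movmskps_u128 worker is
 * assumed to compute: the sign bit of each of the four packed singles, packed
 * into bits 3:0 of the result: */
#if 0
static uint8_t movmskpsSketch(PCRTUINT128U puSrc)
{
    uint8_t bRet = 0;
    for (unsigned i = 0; i < 4; i++)
        bRet |= (uint8_t)(((puSrc->au32[i] >> 31) & 1) << i);
    return bRet;
}
#endif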
5224
5225
5226/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5227FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5228{
5229 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5231 if (IEM_IS_MODRM_REG_MODE(bRm))
5232 {
5233 /*
5234 * Register, register.
5235 */
5236 IEM_MC_BEGIN(0, 0);
5237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5238 IEM_MC_LOCAL(uint8_t, u8Dst);
5239 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5240 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5242 IEM_MC_PREPARE_SSE_USAGE();
5243 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5244 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5245 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5246 IEM_MC_ADVANCE_RIP_AND_FINISH();
5247 IEM_MC_END();
5248 }
5249 /* No memory operand. */
5250 else
5251 IEMOP_RAISE_INVALID_OPCODE_RET();
5253}
5254
5255
5256/* Opcode 0xf3 0x0f 0x50 - invalid */
5257/* Opcode 0xf2 0x0f 0x50 - invalid */
5258
5259
5260/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5261FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5262{
5263 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5264 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5265}
5266
5267
5268/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5269FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5270{
5271 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5272 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5273}
5274
5275
5276/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5277FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5278{
5279 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5280 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5281}
5282
5283
5284/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5285FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5286{
5287 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5288 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5289}
5290
5291
5292/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5293FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5294{
5295 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5296 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5297}
5298
5299
5300/* Opcode 0x66 0x0f 0x52 - invalid */
5301
5302
5303/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5304FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5305{
5306 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5307 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5308}
5309
5310
5311/* Opcode 0xf2 0x0f 0x52 - invalid */
5312
5313
5314/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5315FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5316{
5317 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5318 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5319}
5320
5321
5322/* Opcode 0x66 0x0f 0x53 - invalid */
5323
5324
5325/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5326FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5327{
5328 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5329 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5330}
5331
5332
5333/* Opcode 0xf2 0x0f 0x53 - invalid */
5334
5335
5336/** Opcode 0x0f 0x54 - andps Vps, Wps */
5337FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5338{
5339 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5340 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5341}
5342
5343
5344/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5345FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5346{
5347 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5348 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5349}
5350
5351
5352/* Opcode 0xf3 0x0f 0x54 - invalid */
5353/* Opcode 0xf2 0x0f 0x54 - invalid */
5354
5355
5356/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5357FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5358{
5359 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5360 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5361}
5362
5363
5364/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5365FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5366{
5367 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5368 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5369}
5370
5371
5372/* Opcode 0xf3 0x0f 0x55 - invalid */
5373/* Opcode 0xf2 0x0f 0x55 - invalid */
5374
5375
5376/** Opcode 0x0f 0x56 - orps Vps, Wps */
5377FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5378{
5379 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5380 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_por_u128);
5381}
5382
5383
5384/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5385FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5386{
5387 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5388 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
5389}
5390
5391
5392/* Opcode 0xf3 0x0f 0x56 - invalid */
5393/* Opcode 0xf2 0x0f 0x56 - invalid */
5394
5395
5396/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5397FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5398{
5399 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5400 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5401}
5402
5403
5404/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5405FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5406{
5407 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5408 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5409}
5410
5411
5412/* Opcode 0xf3 0x0f 0x57 - invalid */
5413/* Opcode 0xf2 0x0f 0x57 - invalid */
5414
5415/** Opcode 0x0f 0x58 - addps Vps, Wps */
5416FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5417{
5418 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5419 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5420}
5421
5422
5423/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5424FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5425{
5426 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5427 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5428}
5429
5430
5431/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5432FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5433{
5434 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5435 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5436}
5437
5438
5439/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5440FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5441{
5442 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5443 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5444}
5445
5446
5447/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5448FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5449{
5450 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5451 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5452}
5453
5454
5455/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5456FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5457{
5458 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5459 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5460}
5461
5462
5463/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5464FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5465{
5466 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5467 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5468}
5469
5470
5471/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5472FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5473{
5474 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5475 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5476}
5477
5478
5479/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5480FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5481{
5482 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5483 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5484}
5485
5486
5487/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5488FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5489{
5490 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5491 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5492}
5493
5494
5495/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5496FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5497{
5498 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5499 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5500}
5501
5502
5503/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5504FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5505{
5506 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5507 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5508}
5509
5510
5511/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5512FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5513{
5514 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5515 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5516}
5517
5518
5519/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5520FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5521{
5522 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5523 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5524}
5525
5526
5527/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5528FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5529{
5530 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5531 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5532}
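
/*
 * Sketch of the cvtps2dq vs. cvttps2dq distinction (assumption, single lane
 * only; the real workers also handle MXCSR flags and out-of-range results):
 * cvtps2dq rounds according to MXCSR.RC, while cvttps2dq always truncates.
 */
#if 0 /* illustrative only */
static int32_t iemRefCvttLane(float r32Src)
{
    return (int32_t)r32Src; /* a C cast truncates toward zero, like cvttps2dq */
}
#endif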
5533
5534
5535/* Opcode 0xf2 0x0f 0x5b - invalid */
5536
5537
5538/** Opcode 0x0f 0x5c - subps Vps, Wps */
5539FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5540{
5541 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5542 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5543}
5544
5545
5546/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5547FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5548{
5549 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5550 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5551}
5552
5553
5554/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5555FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5556{
5557 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5558 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5559}
5560
5561
5562/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5563FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5564{
5565 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5566 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5567}
5568
5569
5570/** Opcode 0x0f 0x5d - minps Vps, Wps */
5571FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5572{
5573 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5574 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5575}
5576
5577
5578/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5579FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5580{
5581 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5582 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5583}
5584
5585
5586/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5587FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5588{
5589 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5590 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5591}
5592
5593
5594/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5595FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5596{
5597 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5598 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5599}
5600
5601
5602/** Opcode 0x0f 0x5e - divps Vps, Wps */
5603FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5604{
5605 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5606 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5607}
5608
5609
5610/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5611FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5612{
5613 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5614 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5615}
5616
5617
5618/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5619FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5620{
5621 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5622 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5623}
5624
5625
5626/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5627FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5628{
5629 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5630 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5631}
5632
5633
5634/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5635FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5636{
5637 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5638 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5639}
5640
5641
5642/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5643FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5644{
5645 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5646 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5647}
5648
5649
5650/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5651FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5652{
5653 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5654 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5655}
5656
5657
5658/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5659FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5660{
5661 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5662 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5663}
5664
5665
5666/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5667FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5668{
5669 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5670 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5671}
5672
5673
5674/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5675FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5676{
5677 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5678 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5679}
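
/*
 * Reference semantics sketch (assumption, not iemAImpl_punpcklbw_u64 itself):
 * punpcklbw interleaves the low four bytes of destination and source, with
 * destination bytes landing in the even result positions.
 */
#if 0 /* illustrative only */
static uint64_t iemRefPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even bytes from dst */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd bytes from src */
    }
    return uResult;
}
#endif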
5680
5681
5682/* Opcode 0xf3 0x0f 0x60 - invalid */
5683
5684
5685/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5686FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5687{
5688 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5689 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5690 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5691}
5692
5693
5694/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5695FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5696{
5697 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5698 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5699}
5700
5701
5702/* Opcode 0xf3 0x0f 0x61 - invalid */
5703
5704
5705/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5706FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5707{
5708 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5709 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5710}
5711
5712
5713/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5714FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5715{
5716 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5717 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5718}
5719
5720
5721/* Opcode 0xf3 0x0f 0x62 - invalid */
5722
5723
5724
5725/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5726FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5727{
5728 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5729 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5730}
5731
5732
5733/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5734FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5735{
5736 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5737 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5738}
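
/*
 * Saturation helper sketch (assumption): packsswb narrows each signed word to
 * a signed byte, clamping to [-128, 127]; packuswb clamps to [0, 255] instead.
 */
#if 0 /* illustrative only */
static int8_t iemRefSatSswb(int16_t i16)
{
    if (i16 > INT8_MAX) return INT8_MAX;
    if (i16 < INT8_MIN) return INT8_MIN;
    return (int8_t)i16;
}
#endif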
5739
5740
5741/* Opcode 0xf3 0x0f 0x63 - invalid */
5742
5743
5744/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5745FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5746{
5747 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5748 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5749}
5750
5751
5752/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5753FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5754{
5755 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5756 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5757}
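
/*
 * Reference lane sketch (assumption): the pcmpXX family writes all-ones to a
 * lane when the predicate holds and all-zeroes otherwise, e.g. per byte:
 */
#if 0 /* illustrative only */
static uint8_t iemRefPcmpGtB(int8_t i8Dst, int8_t i8Src)
{
    return i8Dst > i8Src ? UINT8_C(0xff) : 0;
}
#endif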
5758
5759
5760/* Opcode 0xf3 0x0f 0x64 - invalid */
5761
5762
5763/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5764FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5765{
5766 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5767 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5768}
5769
5770
5771/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5772FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5773{
5774 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5775 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5776}
5777
5778
5779/* Opcode 0xf3 0x0f 0x65 - invalid */
5780
5781
5782/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5783FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5784{
5785 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5786 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5787}
5788
5789
5790/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5791FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5792{
5793 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5794 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5795}
5796
5797
5798/* Opcode 0xf3 0x0f 0x66 - invalid */
5799
5800
5801/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5802FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5803{
5804 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5805 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5806}
5807
5808
5809/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5810FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5811{
5812 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5813 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5814}
5815
5816
5817/* Opcode 0xf3 0x0f 0x67 - invalid */
5818
5819
5820/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5821 * @note Intel and AMD both use Qd for the second parameter, however they
5822 * both list it as an mmX/mem64 operand and Intel describes it as being
5823 * loaded as a qword, so it should be Qq, shouldn't it? */
5824FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5825{
5826 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5827 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5828}
5829
5830
5831/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5832FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5833{
5834 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5835 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5836}
5837
5838
5839/* Opcode 0xf3 0x0f 0x68 - invalid */
5840
5841
5842/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5843 * @note Intel and AMD both use Qd for the second parameter, however they
5844 * both list it as an mmX/mem64 operand and Intel describes it as being
5845 * loaded as a qword, so it should be Qq, shouldn't it? */
5846FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5847{
5848 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5849 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5850}
5851
5852
5853/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5854FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5855{
5856 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5857 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5859}
5860
5861
5862/* Opcode 0xf3 0x0f 0x69 - invalid */
5863
5864
5865/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5866 * @note Intel and AMD both use Qd for the second parameter, however they
5867 * both list it as an mmX/mem64 operand and Intel describes it as being
5868 * loaded as a qword, so it should be Qq, shouldn't it? */
5869FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5870{
5871 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5872 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5873}
5874
5875
5876/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5877FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5878{
5879 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5880 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5881}
5882
5883
5884/* Opcode 0xf3 0x0f 0x6a - invalid */
5885
5886
5887/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5888FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5889{
5890 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5891 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5892}
5893
5894
5895/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5896FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5897{
5898 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5899 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5900}
5901
5902
5903/* Opcode 0xf3 0x0f 0x6b - invalid */
5904
5905
5906/* Opcode 0x0f 0x6c - invalid */
5907
5908
5909/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5910FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5911{
5912 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5913 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5914}
5915
5916
5917/* Opcode 0xf3 0x0f 0x6c - invalid */
5918/* Opcode 0xf2 0x0f 0x6c - invalid */
5919
5920
5921/* Opcode 0x0f 0x6d - invalid */
5922
5923
5924/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5925FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5926{
5927 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5928 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5929}
5930
5931
5932/* Opcode 0xf3 0x0f 0x6d - invalid */
5933
5934
5935FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5936{
5937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5939 {
5940 /**
5941 * @opcode 0x6e
5942 * @opcodesub rex.w=1
5943 * @oppfx none
5944 * @opcpuid mmx
5945 * @opgroup og_mmx_datamove
5946 * @opxcpttype 5
5947 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5948 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5949 */
5950 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5951 if (IEM_IS_MODRM_REG_MODE(bRm))
5952 {
5953 /* MMX, greg64 */
5954 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5956 IEM_MC_LOCAL(uint64_t, u64Tmp);
5957
5958 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5959 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5960 IEM_MC_FPU_TO_MMX_MODE();
5961
5962 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5963 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5964
5965 IEM_MC_ADVANCE_RIP_AND_FINISH();
5966 IEM_MC_END();
5967 }
5968 else
5969 {
5970 /* MMX, [mem64] */
5971 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5973 IEM_MC_LOCAL(uint64_t, u64Tmp);
5974
5975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5977 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5978 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5979
5980 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5981 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5982 IEM_MC_FPU_TO_MMX_MODE();
5983
5984 IEM_MC_ADVANCE_RIP_AND_FINISH();
5985 IEM_MC_END();
5986 }
5987 }
5988 else
5989 {
5990 /**
5991 * @opdone
5992 * @opcode 0x6e
5993 * @opcodesub rex.w=0
5994 * @oppfx none
5995 * @opcpuid mmx
5996 * @opgroup og_mmx_datamove
5997 * @opxcpttype 5
5998 * @opfunction iemOp_movd_q_Pd_Ey
5999 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6000 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6001 */
6002 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6003 if (IEM_IS_MODRM_REG_MODE(bRm))
6004 {
6005 /* MMX, greg32 */
6006 IEM_MC_BEGIN(0, 0);
6007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6008 IEM_MC_LOCAL(uint32_t, u32Tmp);
6009
6010 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6011 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6012 IEM_MC_FPU_TO_MMX_MODE();
6013
6014 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6015 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6016
6017 IEM_MC_ADVANCE_RIP_AND_FINISH();
6018 IEM_MC_END();
6019 }
6020 else
6021 {
6022 /* MMX, [mem32] */
6023 IEM_MC_BEGIN(0, 0);
6024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6025 IEM_MC_LOCAL(uint32_t, u32Tmp);
6026
6027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6029 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6030 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6031
6032 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6033 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6034 IEM_MC_FPU_TO_MMX_MODE();
6035
6036 IEM_MC_ADVANCE_RIP_AND_FINISH();
6037 IEM_MC_END();
6038 }
6039 }
6040}
6041
6042FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6043{
6044 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6045 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6046 {
6047 /**
6048 * @opcode 0x6e
6049 * @opcodesub rex.w=1
6050 * @oppfx 0x66
6051 * @opcpuid sse2
6052 * @opgroup og_sse2_simdint_datamove
6053 * @opxcpttype 5
6054 * @optest 64-bit / op1=1 op2=2 -> op1=2
6055 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6056 */
6057 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6058 if (IEM_IS_MODRM_REG_MODE(bRm))
6059 {
6060 /* XMM, greg64 */
6061 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6063 IEM_MC_LOCAL(uint64_t, u64Tmp);
6064
6065 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6067
6068 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6069 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6070
6071 IEM_MC_ADVANCE_RIP_AND_FINISH();
6072 IEM_MC_END();
6073 }
6074 else
6075 {
6076 /* XMM, [mem64] */
6077 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6079 IEM_MC_LOCAL(uint64_t, u64Tmp);
6080
6081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6083 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6085
6086 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6087 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6088
6089 IEM_MC_ADVANCE_RIP_AND_FINISH();
6090 IEM_MC_END();
6091 }
6092 }
6093 else
6094 {
6095 /**
6096 * @opdone
6097 * @opcode 0x6e
6098 * @opcodesub rex.w=0
6099 * @oppfx 0x66
6100 * @opcpuid sse2
6101 * @opgroup og_sse2_simdint_datamove
6102 * @opxcpttype 5
6103 * @opfunction iemOp_movd_q_Vy_Ey
6104 * @optest op1=1 op2=2 -> op1=2
6105 * @optest op1=0 op2=-42 -> op1=-42
6106 */
6107 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6108 if (IEM_IS_MODRM_REG_MODE(bRm))
6109 {
6110 /* XMM, greg32 */
6111 IEM_MC_BEGIN(0, 0);
6112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6113 IEM_MC_LOCAL(uint32_t, u32Tmp);
6114
6115 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6117
6118 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6119 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6120
6121 IEM_MC_ADVANCE_RIP_AND_FINISH();
6122 IEM_MC_END();
6123 }
6124 else
6125 {
6126 /* XMM, [mem32] */
6127 IEM_MC_BEGIN(0, 0);
6128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6129 IEM_MC_LOCAL(uint32_t, u32Tmp);
6130
6131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6135
6136 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6137 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6138
6139 IEM_MC_ADVANCE_RIP_AND_FINISH();
6140 IEM_MC_END();
6141 }
6142 }
6143}
6144
6145/* Opcode 0xf3 0x0f 0x6e - invalid */
6146
6147
6148/**
6149 * @opcode 0x6f
6150 * @oppfx none
6151 * @opcpuid mmx
6152 * @opgroup og_mmx_datamove
6153 * @opxcpttype 5
6154 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6155 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6156 */
6157FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6158{
6159 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6161 if (IEM_IS_MODRM_REG_MODE(bRm))
6162 {
6163 /*
6164 * Register, register.
6165 */
6166 IEM_MC_BEGIN(0, 0);
6167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6168 IEM_MC_LOCAL(uint64_t, u64Tmp);
6169
6170 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6171 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6172 IEM_MC_FPU_TO_MMX_MODE();
6173
6174 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6175 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6176
6177 IEM_MC_ADVANCE_RIP_AND_FINISH();
6178 IEM_MC_END();
6179 }
6180 else
6181 {
6182 /*
6183 * Register, memory.
6184 */
6185 IEM_MC_BEGIN(0, 0);
6186 IEM_MC_LOCAL(uint64_t, u64Tmp);
6187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6188
6189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6191 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6192 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6193
6194 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6195 IEM_MC_FPU_TO_MMX_MODE();
6196
6197 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6198
6199 IEM_MC_ADVANCE_RIP_AND_FINISH();
6200 IEM_MC_END();
6201 }
6202}
6203
6204/**
6205 * @opcode 0x6f
6206 * @oppfx 0x66
6207 * @opcpuid sse2
6208 * @opgroup og_sse2_simdint_datamove
6209 * @opxcpttype 1
6210 * @optest op1=1 op2=2 -> op1=2
6211 * @optest op1=0 op2=-42 -> op1=-42
6212 */
6213FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6214{
6215 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6217 if (IEM_IS_MODRM_REG_MODE(bRm))
6218 {
6219 /*
6220 * Register, register.
6221 */
6222 IEM_MC_BEGIN(0, 0);
6223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6224
6225 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6226 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6227
6228 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6229 IEM_GET_MODRM_RM(pVCpu, bRm));
6230 IEM_MC_ADVANCE_RIP_AND_FINISH();
6231 IEM_MC_END();
6232 }
6233 else
6234 {
6235 /*
6236 * Register, memory.
6237 */
6238 IEM_MC_BEGIN(0, 0);
6239 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6241
6242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6244 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6245 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6246
6247 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6248 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6249
6250 IEM_MC_ADVANCE_RIP_AND_FINISH();
6251 IEM_MC_END();
6252 }
6253}
6254
6255/**
6256 * @opcode 0x6f
6257 * @oppfx 0xf3
6258 * @opcpuid sse2
6259 * @opgroup og_sse2_simdint_datamove
6260 * @opxcpttype 4UA
6261 * @optest op1=1 op2=2 -> op1=2
6262 * @optest op1=0 op2=-42 -> op1=-42
6263 */
6264FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6265{
6266 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6268 if (IEM_IS_MODRM_REG_MODE(bRm))
6269 {
6270 /*
6271 * Register, register.
6272 */
6273 IEM_MC_BEGIN(0, 0);
6274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6275 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6277 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6278 IEM_GET_MODRM_RM(pVCpu, bRm));
6279 IEM_MC_ADVANCE_RIP_AND_FINISH();
6280 IEM_MC_END();
6281 }
6282 else
6283 {
6284 /*
6285 * Register, memory.
6286 */
6287 IEM_MC_BEGIN(0, 0);
6288 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6290
6291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6293 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6294 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6295 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6296 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6297
6298 IEM_MC_ADVANCE_RIP_AND_FINISH();
6299 IEM_MC_END();
6300 }
6301}
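
/*
 * The only difference between the movdqa and movdqu memory paths above is the
 * fetch: _ALIGN_SSE raises \#GP(0) on a misaligned operand while _NO_AC does
 * not. Conceptually (sketch, assumption):
 */
#if 0 /* illustrative only */
static bool iemRefMovdqaSrcAlignmentOk(RTGCPTR GCPtrEffSrc)
{
    return (GCPtrEffSrc & 15) == 0; /* 16-byte aligned, else #GP(0) */
}
#endif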
6302
6303
6304/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6305FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6306{
6307 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6309 if (IEM_IS_MODRM_REG_MODE(bRm))
6310 {
6311 /*
6312 * Register, register.
6313 */
6314 IEM_MC_BEGIN(0, 0);
6315 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6317 IEM_MC_ARG(uint64_t *, pDst, 0);
6318 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6319 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6320 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6321 IEM_MC_PREPARE_FPU_USAGE();
6322 IEM_MC_FPU_TO_MMX_MODE();
6323
6324 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6325 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6326 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6327 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6328
6329 IEM_MC_ADVANCE_RIP_AND_FINISH();
6330 IEM_MC_END();
6331 }
6332 else
6333 {
6334 /*
6335 * Register, memory.
6336 */
6337 IEM_MC_BEGIN(0, 0);
6338 IEM_MC_ARG(uint64_t *, pDst, 0);
6339 IEM_MC_LOCAL(uint64_t, uSrc);
6340 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6342
6343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6344 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6345 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6347 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6348 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6349
6350 IEM_MC_PREPARE_FPU_USAGE();
6351 IEM_MC_FPU_TO_MMX_MODE();
6352
6353 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6354 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6355 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6356
6357 IEM_MC_ADVANCE_RIP_AND_FINISH();
6358 IEM_MC_END();
6359 }
6360}
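
/*
 * Reference semantics sketch (assumption, not iemAImpl_pshufw_u64 itself):
 * each 2-bit field of the immediate selects the source word that lands in the
 * corresponding destination word.
 */
#if 0 /* illustrative only */
static uint64_t iemRefPshufwU64(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bImm >> (i * 2)) & 3;
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (i * 16);
    }
    return uResult;
}
#endif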
6361
6362
6363/**
6364 * Common worker for SSE2 instructions on the forms:
6365 * pshufd xmm1, xmm2/mem128, imm8
6366 * pshufhw xmm1, xmm2/mem128, imm8
6367 * pshuflw xmm1, xmm2/mem128, imm8
6368 *
6369 * Proper alignment of the 128-bit operand is enforced.
6370 * Exceptions type 4. SSE2 cpuid checks.
6371 */
6372FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6373{
6374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6375 if (IEM_IS_MODRM_REG_MODE(bRm))
6376 {
6377 /*
6378 * Register, register.
6379 */
6380 IEM_MC_BEGIN(0, 0);
6381 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6383 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6384 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6385 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6386 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6387 IEM_MC_PREPARE_SSE_USAGE();
6388 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6389 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6390 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6391 IEM_MC_ADVANCE_RIP_AND_FINISH();
6392 IEM_MC_END();
6393 }
6394 else
6395 {
6396 /*
6397 * Register, memory.
6398 */
6399 IEM_MC_BEGIN(0, 0);
6400 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6401 IEM_MC_LOCAL(RTUINT128U, uSrc);
6402 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6404
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6406 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6407 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6409 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6410
6411 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6412 IEM_MC_PREPARE_SSE_USAGE();
6413 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6414 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6415
6416 IEM_MC_ADVANCE_RIP_AND_FINISH();
6417 IEM_MC_END();
6418 }
6419}
6420
6421
6422/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6423FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6424{
6425 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6426 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6427}
6428
6429
6430/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6431FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6432{
6433 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6434 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6435}
6436
6437
6438/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6439FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6440{
6441 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6442 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6443}
6444
6445
6446/**
6447 * Common worker for MMX instructions of the form:
6448 * psrlw mm, imm8
6449 * psraw mm, imm8
6450 * psllw mm, imm8
6451 * psrld mm, imm8
6452 * psrad mm, imm8
6453 * pslld mm, imm8
6454 * psrlq mm, imm8
6455 * psllq mm, imm8
6456 *
6457 */
6458FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6459{
6460 if (IEM_IS_MODRM_REG_MODE(bRm))
6461 {
6462 /*
6463 * Register, immediate.
6464 */
6465 IEM_MC_BEGIN(0, 0);
6466 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6468 IEM_MC_ARG(uint64_t *, pDst, 0);
6469 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6470 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6471 IEM_MC_PREPARE_FPU_USAGE();
6472 IEM_MC_FPU_TO_MMX_MODE();
6473
6474 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6475 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6476 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6477
6478 IEM_MC_ADVANCE_RIP_AND_FINISH();
6479 IEM_MC_END();
6480 }
6481 else
6482 {
6483 /*
6484 * Register, memory not supported.
6485 */
6486 /// @todo Caller already enforced register mode?!
6487 AssertFailedReturn(VINF_SUCCESS);
6488 }
6489}
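
/*
 * Worker lane sketch (assumption): for the logical shift-by-immediate forms a
 * count larger than the element width zeroes the element (the arithmetic
 * psraw/psrad instead keep replicating the sign bit), e.g. per word for psrlw:
 */
#if 0 /* illustrative only */
static uint16_t iemRefPsrlwLane(uint16_t u16, uint8_t bShift)
{
    return bShift <= 15 ? (uint16_t)(u16 >> bShift) : 0;
}
#endif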
6490
6491
6492#if 0 /*unused*/
6493/**
6494 * Common worker for SSE2 instructions of the form:
6495 * psrlw xmm, imm8
6496 * psraw xmm, imm8
6497 * psllw xmm, imm8
6498 * psrld xmm, imm8
6499 * psrad xmm, imm8
6500 * pslld xmm, imm8
6501 * psrlq xmm, imm8
6502 * psllq xmm, imm8
6503 *
6504 */
6505FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6506{
6507 if (IEM_IS_MODRM_REG_MODE(bRm))
6508 {
6509 /*
6510 * Register, immediate.
6511 */
6512 IEM_MC_BEGIN(0, 0);
6513 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6515 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6516 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6518 IEM_MC_PREPARE_SSE_USAGE();
6519 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6520 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6521 IEM_MC_ADVANCE_RIP_AND_FINISH();
6522 IEM_MC_END();
6523 }
6524 else
6525 {
6526 /*
6527 * Register, memory.
6528 */
6529 /// @todo Caller already enforced register mode?!
6530 AssertFailedReturn(VINF_SUCCESS);
6531 }
6532}
6533#endif
6534
6535
6536/**
6537 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6538 */
6539#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6540 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6541 { \
6542 /* \
6543 * Register, immediate. \
6544 */ \
6545 IEM_MC_BEGIN(0, 0); \
6546 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6548 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6549 IEM_MC_PREPARE_SSE_USAGE(); \
6550 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6551 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6552 } IEM_MC_NATIVE_ELSE() { \
6553 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6554 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6555 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6556 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6557 } IEM_MC_NATIVE_ENDIF(); \
6558 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6559 IEM_MC_END(); \
6560 } \
6561 else \
6562 { \
6563 /* \
6564 * Register, memory. \
6565 */ \
6566 AssertFailedReturn(VINF_SUCCESS); \
6567 } (void)0
6568
6569
6570/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6571FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6572{
6573// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6574 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6575}
6576
6577
6578/** Opcode 0x66 0x0f 0x71 11/2. */
6579FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6580{
6581// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6582 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6583}
6584
6585
6586/** Opcode 0x0f 0x71 11/4. */
6587FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6588{
6589// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6590 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6591}
6592
6593
6594/** Opcode 0x66 0x0f 0x71 11/4. */
6595FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6596{
6597// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6598 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6599}
6600
6601
6602/** Opcode 0x0f 0x71 11/6. */
6603FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6604{
6605// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6606 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6607}
6608
6609
6610/** Opcode 0x66 0x0f 0x71 11/6. */
6611FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6612{
6613// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6614 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6615}
6616
6617
6618/**
6619 * Group 12 jump table for register variant.
6620 */
6621IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6622{
6623 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6624 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6625 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6626 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6627 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6628 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6629 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6630 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6631};
6632AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6633
6634
6635/** Opcode 0x0f 0x71. */
6636FNIEMOP_DEF(iemOp_Grp12)
6637{
6638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6639 if (IEM_IS_MODRM_REG_MODE(bRm))
6640 /* register, register */
6641 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6642 + pVCpu->iem.s.idxPrefix], bRm);
6643 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6644}
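
/*
 * Dispatch sketch (the idxPrefix encoding is an assumption here): the jump
 * table is /r-major with one column per SIMD prefix, i.e. none=0, 0x66=1,
 * 0xf3=2, 0xf2=3, matching the four entries per row above.
 */
#if 0 /* illustrative only */
static unsigned iemRefGrp12Index(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix */
}
#endif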
6645
6646
6647/** Opcode 0x0f 0x72 11/2. */
6648FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6649{
6650// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6651 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6652}
6653
6654
6655/** Opcode 0x66 0x0f 0x72 11/2. */
6656FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6657{
6658// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6659 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6660}
6661
6662
6663/** Opcode 0x0f 0x72 11/4. */
6664FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6665{
6666// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6667 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6668}
6669
6670
6671/** Opcode 0x66 0x0f 0x72 11/4. */
6672FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6673{
6674// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6675 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6676}
6677
6678
6679/** Opcode 0x0f 0x72 11/6. */
6680FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6681{
6682// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6683 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6684}
6685
6686/** Opcode 0x66 0x0f 0x72 11/6. */
6687FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6688{
6689// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6690 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6691}
6692
6693
6694/**
6695 * Group 13 jump table for register variant.
6696 */
6697IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6698{
6699 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6700 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6701 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6702 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6703 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6704 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6705 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6706 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6707};
6708AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6709
6710/** Opcode 0x0f 0x72. */
6711FNIEMOP_DEF(iemOp_Grp13)
6712{
6713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6714 if (IEM_IS_MODRM_REG_MODE(bRm))
6715 /* register, register */
6716 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6717 + pVCpu->iem.s.idxPrefix], bRm);
6718 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6719}
6720
6721
6722/** Opcode 0x0f 0x73 11/2. */
6723FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6724{
6725// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6726 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6727}
6728
6729
6730/** Opcode 0x66 0x0f 0x73 11/2. */
6731FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6732{
6733// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6734 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6735}
6736
6737
6738/** Opcode 0x66 0x0f 0x73 11/3. */
6739FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6740{
6741// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6742 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6743}
6744
6745
6746/** Opcode 0x0f 0x73 11/6. */
6747FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6748{
6749// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6750 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6751}
6752
6753
6754/** Opcode 0x66 0x0f 0x73 11/6. */
6755FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6756{
6757// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6758 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6759}
6760
6761
6762/** Opcode 0x66 0x0f 0x73 11/7. */
6763FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6764{
6765// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6766 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6767}
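
/*
 * Reference sketch (assumption): unlike the other group 14 shifts, psrldq and
 * pslldq shift the whole 128-bit register by *bytes*; counts above 15 clear it.
 */
#if 0 /* illustrative only */
static void iemRefPslldq(PRTUINT128U puDst, uint8_t cbShift)
{
    RTUINT128U const uSrc = *puDst;
    puDst->s.Lo = puDst->s.Hi = 0;
    if (cbShift <= 15)
        for (unsigned i = cbShift; i < 16; i++)
            puDst->au8[i] = uSrc.au8[i - cbShift];
}
#endif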
6768
6769/**
6770 * Group 14 jump table for register variant.
6771 */
6772IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6773{
6774 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6775 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6776 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6777 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6778 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6779 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6780 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6781 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6782};
6783AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6784
6785
6786/** Opcode 0x0f 0x73. */
6787FNIEMOP_DEF(iemOp_Grp14)
6788{
6789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6790 if (IEM_IS_MODRM_REG_MODE(bRm))
6791 /* register, register */
6792 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6793 + pVCpu->iem.s.idxPrefix], bRm);
6794 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6795}
6796
6797
6798/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6799FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6800{
6801 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6802 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6803}
6804
6805
6806/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6807FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6808{
6809 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6810 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6811}
6812
6813
6814/* Opcode 0xf3 0x0f 0x74 - invalid */
6815/* Opcode 0xf2 0x0f 0x74 - invalid */
6816
6817
6818/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6819FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6820{
6821 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6822 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6823}
6824
6825
6826/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6827FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6828{
6829 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6830 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6831}
6832
6833
6834/* Opcode 0xf3 0x0f 0x75 - invalid */
6835/* Opcode 0xf2 0x0f 0x75 - invalid */
6836
6837
6838/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6839FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6840{
6841 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6842 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6843}
6844
6845
6846/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6847FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6848{
6849 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6850 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6851}
6852
6853
6854/* Opcode 0xf3 0x0f 0x76 - invalid */
6855/* Opcode 0xf2 0x0f 0x76 - invalid */
6856
6857
6858/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6859FNIEMOP_DEF(iemOp_emms)
6860{
6861 IEMOP_MNEMONIC(emms, "emms");
6862 IEM_MC_BEGIN(0, 0);
6863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6865 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6866 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6867 IEM_MC_FPU_FROM_MMX_MODE();
6868 IEM_MC_ADVANCE_RIP_AND_FINISH();
6869 IEM_MC_END();
6870}
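
/*
 * Conceptual effect sketch (assumption, state layout simplified):
 * IEM_MC_FPU_FROM_MMX_MODE above leaves MMX mode by marking every x87
 * register empty again, i.e. in the FXSAVE abridged tag-word representation:
 */
#if 0 /* illustrative only */
static void iemRefEmms(uint8_t *pbAbridgedFtw)
{
    *pbAbridgedFtw = 0; /* bit set = valid, so 0 means all eight registers empty */
}
#endif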
6871
6872/* Opcode 0x66 0x0f 0x77 - invalid */
6873/* Opcode 0xf3 0x0f 0x77 - invalid */
6874/* Opcode 0xf2 0x0f 0x77 - invalid */
6875
6876/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6877#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6878FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6879{
6880 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6881 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6882 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6883 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6884
6885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6886 if (IEM_IS_MODRM_REG_MODE(bRm))
6887 {
6888 /*
6889 * Register, register.
6890 */
6891 if (enmEffOpSize == IEMMODE_64BIT)
6892 {
6893 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6894 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6895 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6896 IEM_MC_ARG(uint64_t, u64Enc, 1);
6897 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6898 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6899 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6900 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6901 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6902 IEM_MC_END();
6903 }
6904 else
6905 {
6906 IEM_MC_BEGIN(0, 0);
6907 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6908 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6909 IEM_MC_ARG(uint32_t, u32Enc, 1);
6910 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6911 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6912 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6913 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6914 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6915 IEM_MC_END();
6916 }
6917 }
6918 else
6919 {
6920 /*
6921 * Memory, register.
6922 */
6923 if (enmEffOpSize == IEMMODE_64BIT)
6924 {
6925 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6926 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6928 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6929 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6930 IEM_MC_ARG(uint64_t, u64Enc, 2);
6931 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6932 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6933 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6934 IEM_MC_END();
6935 }
6936 else
6937 {
6938 IEM_MC_BEGIN(0, 0);
6939 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6941 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6942 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6943 IEM_MC_ARG(uint32_t, u32Enc, 2);
6944 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6945 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6946 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6947 IEM_MC_END();
6948 }
6949 }
6950}
6951#else
6952FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
6953#endif
6954
6955/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6956FNIEMOP_STUB(iemOp_AmdGrp17);
6957/* Opcode 0xf3 0x0f 0x78 - invalid */
6958/* Opcode 0xf2 0x0f 0x78 - invalid */
6959
6960/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6961#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6962FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6963{
6964 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6965 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6966 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6967 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6968
6969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6970 if (IEM_IS_MODRM_REG_MODE(bRm))
6971 {
6972 /*
6973 * Register, register.
6974 */
6975 if (enmEffOpSize == IEMMODE_64BIT)
6976 {
6977 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6978 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6979 IEM_MC_ARG(uint64_t, u64Val, 0);
6980 IEM_MC_ARG(uint64_t, u64Enc, 1);
6981 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6982 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6983 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
6984 IEM_MC_END();
6985 }
6986 else
6987 {
6988 IEM_MC_BEGIN(0, 0);
6989 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6990 IEM_MC_ARG(uint32_t, u32Val, 0);
6991 IEM_MC_ARG(uint32_t, u32Enc, 1);
6992 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6993 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6994 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
6995 IEM_MC_END();
6996 }
6997 }
6998 else
6999 {
7000 /*
7001 * Register, memory.
7002 */
7003 if (enmEffOpSize == IEMMODE_64BIT)
7004 {
7005 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7006 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7008 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7009 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7010 IEM_MC_ARG(uint64_t, u64Enc, 2);
7011 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7012 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7013 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7014 IEM_MC_END();
7015 }
7016 else
7017 {
7018 IEM_MC_BEGIN(0, 0);
7019 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7021 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7022 IEM_MC_ARG(uint32_t, u32Enc, 2);
7023 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7024 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7025 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7026 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7027 IEM_MC_END();
7028 }
7029 }
7030}
7031#else
7032FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7033#endif
7034/* Opcode 0x66 0x0f 0x79 - invalid */
7035/* Opcode 0xf3 0x0f 0x79 - invalid */
7036/* Opcode 0xf2 0x0f 0x79 - invalid */
7037
7038/* Opcode 0x0f 0x7a - invalid */
7039/* Opcode 0x66 0x0f 0x7a - invalid */
7040/* Opcode 0xf3 0x0f 0x7a - invalid */
7041/* Opcode 0xf2 0x0f 0x7a - invalid */
7042
7043/* Opcode 0x0f 0x7b - invalid */
7044/* Opcode 0x66 0x0f 0x7b - invalid */
7045/* Opcode 0xf3 0x0f 0x7b - invalid */
7046/* Opcode 0xf2 0x0f 0x7b - invalid */
7047
7048/* Opcode 0x0f 0x7c - invalid */
7049
7050
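/*
 * SSE3 horizontal add/subtract group: the low half of the result is formed
 * from adjacent element pairs of the destination operand and the high half
 * from adjacent element pairs of the source operand; the actual pairwise
 * arithmetic is done by the iemAImpl_haddpd/haddps/hsubpd/hsubps workers
 * referenced below.
 */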
7051/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7052FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7053{
7054 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7055 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7056}
7057
7058
7059/* Opcode 0xf3 0x0f 0x7c - invalid */
7060
7061
7062/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7063FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7064{
7065 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7066 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7067}
7068
7069
7070/* Opcode 0x0f 0x7d - invalid */
7071
7072
7073/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7074FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7075{
7076 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7077 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7078}
7079
7080
7081/* Opcode 0xf3 0x0f 0x7d - invalid */
7082
7083
7084/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7085FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7086{
7087 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7088 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7089}
7090
7091
7092/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7093FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7094{
7095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7096 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7097 {
7098 /**
7099 * @opcode 0x7e
7100 * @opcodesub rex.w=1
7101 * @oppfx none
7102 * @opcpuid mmx
7103 * @opgroup og_mmx_datamove
7104 * @opxcpttype 5
7105 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7106 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7107 */
7108 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7109 if (IEM_IS_MODRM_REG_MODE(bRm))
7110 {
7111 /* greg64, MMX */
7112 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7114 IEM_MC_LOCAL(uint64_t, u64Tmp);
7115
7116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7117 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7118 IEM_MC_FPU_TO_MMX_MODE();
7119
7120 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7121 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7122
7123 IEM_MC_ADVANCE_RIP_AND_FINISH();
7124 IEM_MC_END();
7125 }
7126 else
7127 {
7128 /* [mem64], MMX */
7129 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7131 IEM_MC_LOCAL(uint64_t, u64Tmp);
7132
7133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7135 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7136 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7137
7138 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
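            /* Note: the switch to MMX mode is done only after the store,
               presumably so that a faulting store leaves the FPU state
               unmodified and the instruction restarts cleanly. */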
7140 IEM_MC_FPU_TO_MMX_MODE();
7141
7142 IEM_MC_ADVANCE_RIP_AND_FINISH();
7143 IEM_MC_END();
7144 }
7145 }
7146 else
7147 {
7148 /**
7149 * @opdone
7150 * @opcode 0x7e
7151 * @opcodesub rex.w=0
7152 * @oppfx none
7153 * @opcpuid mmx
7154 * @opgroup og_mmx_datamove
7155 * @opxcpttype 5
7156 * @opfunction iemOp_movd_q_Ey_Pd
7157 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7158 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7159 */
7160 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7161 if (IEM_IS_MODRM_REG_MODE(bRm))
7162 {
7163 /* greg32, MMX */
7164 IEM_MC_BEGIN(0, 0);
7165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7166 IEM_MC_LOCAL(uint32_t, u32Tmp);
7167
7168 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7169 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7170 IEM_MC_FPU_TO_MMX_MODE();
7171
7172 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7173 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7174
7175 IEM_MC_ADVANCE_RIP_AND_FINISH();
7176 IEM_MC_END();
7177 }
7178 else
7179 {
7180 /* [mem32], MMX */
7181 IEM_MC_BEGIN(0, 0);
7182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7183 IEM_MC_LOCAL(uint32_t, u32Tmp);
7184
7185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7187 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7188 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7189
7190 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7191 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7192 IEM_MC_FPU_TO_MMX_MODE();
7193
7194 IEM_MC_ADVANCE_RIP_AND_FINISH();
7195 IEM_MC_END();
7196 }
7197 }
7198}
7199
7200
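/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */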
7201FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7202{
7203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7204 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7205 {
7206 /**
7207 * @opcode 0x7e
7208 * @opcodesub rex.w=1
7209 * @oppfx 0x66
7210 * @opcpuid sse2
7211 * @opgroup og_sse2_simdint_datamove
7212 * @opxcpttype 5
7213 * @optest 64-bit / op1=1 op2=2 -> op1=2
7214 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7215 */
7216 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7217 if (IEM_IS_MODRM_REG_MODE(bRm))
7218 {
7219 /* greg64, XMM */
7220 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7222 IEM_MC_LOCAL(uint64_t, u64Tmp);
7223
7224 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7225 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7226
7227 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7228 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7229
7230 IEM_MC_ADVANCE_RIP_AND_FINISH();
7231 IEM_MC_END();
7232 }
7233 else
7234 {
7235 /* [mem64], XMM */
7236 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7238 IEM_MC_LOCAL(uint64_t, u64Tmp);
7239
7240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7242 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7243 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7244
7245 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7246 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7247
7248 IEM_MC_ADVANCE_RIP_AND_FINISH();
7249 IEM_MC_END();
7250 }
7251 }
7252 else
7253 {
7254 /**
7255 * @opdone
7256 * @opcode 0x7e
7257 * @opcodesub rex.w=0
7258 * @oppfx 0x66
7259 * @opcpuid sse2
7260 * @opgroup og_sse2_simdint_datamove
7261 * @opxcpttype 5
7262 * @opfunction iemOp_movd_q_Ey_Vy
7263 * @optest op1=1 op2=2 -> op1=2
7264 * @optest op1=0 op2=-42 -> op1=-42
7265 */
7266 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7267 if (IEM_IS_MODRM_REG_MODE(bRm))
7268 {
7269 /* greg32, XMM */
7270 IEM_MC_BEGIN(0, 0);
7271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7272 IEM_MC_LOCAL(uint32_t, u32Tmp);
7273
7274 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7276
7277 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7278 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7279
7280 IEM_MC_ADVANCE_RIP_AND_FINISH();
7281 IEM_MC_END();
7282 }
7283 else
7284 {
7285 /* [mem32], XMM */
7286 IEM_MC_BEGIN(0, 0);
7287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7288 IEM_MC_LOCAL(uint32_t, u32Tmp);
7289
7290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7294
7295 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7296 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7297
7298 IEM_MC_ADVANCE_RIP_AND_FINISH();
7299 IEM_MC_END();
7300 }
7301 }
7302}
7303
7304/**
7305 * @opcode 0x7e
7306 * @oppfx 0xf3
7307 * @opcpuid sse2
7308 * @opgroup og_sse2_pcksclr_datamove
7309 * @opxcpttype none
7310 * @optest op1=1 op2=2 -> op1=2
7311 * @optest op1=0 op2=-42 -> op1=-42
7312 */
7313FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7314{
7315 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7317 if (IEM_IS_MODRM_REG_MODE(bRm))
7318 {
7319 /*
7320 * XMM128, XMM64.
7321 */
7322 IEM_MC_BEGIN(0, 0);
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7324 IEM_MC_LOCAL(uint64_t, uSrc);
7325
7326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7328
7329 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7330 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7331
7332 IEM_MC_ADVANCE_RIP_AND_FINISH();
7333 IEM_MC_END();
7334 }
7335 else
7336 {
7337 /*
7338 * XMM128, [mem64].
7339 */
7340 IEM_MC_BEGIN(0, 0);
7341 IEM_MC_LOCAL(uint64_t, uSrc);
7342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7343
7344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7348
7349 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7350 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7351
7352 IEM_MC_ADVANCE_RIP_AND_FINISH();
7353 IEM_MC_END();
7354 }
7355}
7356
7357/* Opcode 0xf2 0x0f 0x7e - invalid */
7358
7359
7360/** Opcode 0x0f 0x7f - movq Qq, Pq */
7361FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7362{
7363 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7365 if (IEM_IS_MODRM_REG_MODE(bRm))
7366 {
7367 /*
7368 * MMX, MMX.
7369 */
7370 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7371 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7372 IEM_MC_BEGIN(0, 0);
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7374 IEM_MC_LOCAL(uint64_t, u64Tmp);
7375 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7376 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7377 IEM_MC_FPU_TO_MMX_MODE();
7378
7379 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7380 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7381
7382 IEM_MC_ADVANCE_RIP_AND_FINISH();
7383 IEM_MC_END();
7384 }
7385 else
7386 {
7387 /*
7388 * [mem64], MMX.
7389 */
7390 IEM_MC_BEGIN(0, 0);
7391 IEM_MC_LOCAL(uint64_t, u64Tmp);
7392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7393
7394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7396 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7397 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7398
7399 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7400 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
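        /* As above: enter MMX mode only once the store can no longer fault. */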
7401 IEM_MC_FPU_TO_MMX_MODE();
7402
7403 IEM_MC_ADVANCE_RIP_AND_FINISH();
7404 IEM_MC_END();
7405 }
7406}
7407
7408/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7409FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7410{
7411 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7413 if (IEM_IS_MODRM_REG_MODE(bRm))
7414 {
7415 /*
7416 * XMM, XMM.
7417 */
7418 IEM_MC_BEGIN(0, 0);
7419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7420 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7421 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7422 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7423 IEM_GET_MODRM_REG(pVCpu, bRm));
7424 IEM_MC_ADVANCE_RIP_AND_FINISH();
7425 IEM_MC_END();
7426 }
7427 else
7428 {
7429 /*
7430 * [mem128], XMM.
7431 */
7432 IEM_MC_BEGIN(0, 0);
7433 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7435
7436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7438 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7439 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7440
7441 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7442 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7443
7444 IEM_MC_ADVANCE_RIP_AND_FINISH();
7445 IEM_MC_END();
7446 }
7447}
7448
7449/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7450FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7451{
7452 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7454 if (IEM_IS_MODRM_REG_MODE(bRm))
7455 {
7456 /*
7457 * XMM, XMM.
7458 */
7459 IEM_MC_BEGIN(0, 0);
7460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7461 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7463 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7464 IEM_GET_MODRM_REG(pVCpu, bRm));
7465 IEM_MC_ADVANCE_RIP_AND_FINISH();
7466 IEM_MC_END();
7467 }
7468 else
7469 {
7470 /*
7471 * [mem128], XMM.
7472 */
7473 IEM_MC_BEGIN(0, 0);
7474 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7476
7477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7481
7482 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7483 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7484
7485 IEM_MC_ADVANCE_RIP_AND_FINISH();
7486 IEM_MC_END();
7487 }
7488}
7489
7490/* Opcode 0xf2 0x0f 0x7f - invalid */
7491
7492
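/*
 * The long-form (rel16/rel32) conditional jumps, opcodes 0x0f 0x80 thru 0x8f.
 * All sixteen follow the same pattern: fetch the displacement matching the
 * effective operand size, test the EFLAGS condition, and either take the
 * relative jump or just advance RIP past the instruction.
 */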
7493/**
7494 * @opcode 0x80
7495 * @opfltest of
7496 */
7497FNIEMOP_DEF(iemOp_jo_Jv)
7498{
7499 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7500 IEMOP_HLP_MIN_386();
7501 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7502 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7503 {
7504 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7505 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7507 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7508 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7509 } IEM_MC_ELSE() {
7510 IEM_MC_ADVANCE_RIP_AND_FINISH();
7511 } IEM_MC_ENDIF();
7512 IEM_MC_END();
7513 }
7514 else
7515 {
7516 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7517 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7520 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7521 } IEM_MC_ELSE() {
7522 IEM_MC_ADVANCE_RIP_AND_FINISH();
7523 } IEM_MC_ENDIF();
7524 IEM_MC_END();
7525 }
7526}
7527
7528
7529/**
7530 * @opcode 0x81
7531 * @opfltest of
7532 */
7533FNIEMOP_DEF(iemOp_jno_Jv)
7534{
7535 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7536 IEMOP_HLP_MIN_386();
7537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7538 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7539 {
7540 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7541 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7544 IEM_MC_ADVANCE_RIP_AND_FINISH();
7545 } IEM_MC_ELSE() {
7546 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7547 } IEM_MC_ENDIF();
7548 IEM_MC_END();
7549 }
7550 else
7551 {
7552 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7553 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7556 IEM_MC_ADVANCE_RIP_AND_FINISH();
7557 } IEM_MC_ELSE() {
7558 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7559 } IEM_MC_ENDIF();
7560 IEM_MC_END();
7561 }
7562}
7563
7564
7565/**
7566 * @opcode 0x82
7567 * @opfltest cf
7568 */
7569FNIEMOP_DEF(iemOp_jc_Jv)
7570{
7571 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7572 IEMOP_HLP_MIN_386();
7573 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7574 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7575 {
7576 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7577 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7579 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7580 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7581 } IEM_MC_ELSE() {
7582 IEM_MC_ADVANCE_RIP_AND_FINISH();
7583 } IEM_MC_ENDIF();
7584 IEM_MC_END();
7585 }
7586 else
7587 {
7588 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7589 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7592 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7593 } IEM_MC_ELSE() {
7594 IEM_MC_ADVANCE_RIP_AND_FINISH();
7595 } IEM_MC_ENDIF();
7596 IEM_MC_END();
7597 }
7598}
7599
7600
7601/**
7602 * @opcode 0x83
7603 * @opfltest cf
7604 */
7605FNIEMOP_DEF(iemOp_jnc_Jv)
7606{
7607 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7608 IEMOP_HLP_MIN_386();
7609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7610 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7611 {
7612 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7613 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 } IEM_MC_ELSE() {
7618 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7619 } IEM_MC_ENDIF();
7620 IEM_MC_END();
7621 }
7622 else
7623 {
7624 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7625 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7628 IEM_MC_ADVANCE_RIP_AND_FINISH();
7629 } IEM_MC_ELSE() {
7630 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7631 } IEM_MC_ENDIF();
7632 IEM_MC_END();
7633 }
7634}
7635
7636
7637/**
7638 * @opcode 0x84
7639 * @opfltest zf
7640 */
7641FNIEMOP_DEF(iemOp_je_Jv)
7642{
7643 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7644 IEMOP_HLP_MIN_386();
7645 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7646 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7647 {
7648 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7649 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7652 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7653 } IEM_MC_ELSE() {
7654 IEM_MC_ADVANCE_RIP_AND_FINISH();
7655 } IEM_MC_ENDIF();
7656 IEM_MC_END();
7657 }
7658 else
7659 {
7660 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7661 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7664 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7665 } IEM_MC_ELSE() {
7666 IEM_MC_ADVANCE_RIP_AND_FINISH();
7667 } IEM_MC_ENDIF();
7668 IEM_MC_END();
7669 }
7670}
7671
7672
7673/**
7674 * @opcode 0x85
7675 * @opfltest zf
7676 */
7677FNIEMOP_DEF(iemOp_jne_Jv)
7678{
7679 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7680 IEMOP_HLP_MIN_386();
7681 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7682 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7683 {
7684 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7685 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7688 IEM_MC_ADVANCE_RIP_AND_FINISH();
7689 } IEM_MC_ELSE() {
7690 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7691 } IEM_MC_ENDIF();
7692 IEM_MC_END();
7693 }
7694 else
7695 {
7696 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7697 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7700 IEM_MC_ADVANCE_RIP_AND_FINISH();
7701 } IEM_MC_ELSE() {
7702 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7703 } IEM_MC_ENDIF();
7704 IEM_MC_END();
7705 }
7706}
7707
7708
7709/**
7710 * @opcode 0x86
7711 * @opfltest cf,zf
7712 */
7713FNIEMOP_DEF(iemOp_jbe_Jv)
7714{
7715 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7716 IEMOP_HLP_MIN_386();
7717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7718 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7719 {
7720 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7721 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7723 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7724 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7725 } IEM_MC_ELSE() {
7726 IEM_MC_ADVANCE_RIP_AND_FINISH();
7727 } IEM_MC_ENDIF();
7728 IEM_MC_END();
7729 }
7730 else
7731 {
7732 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7733 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7735 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7736 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7737 } IEM_MC_ELSE() {
7738 IEM_MC_ADVANCE_RIP_AND_FINISH();
7739 } IEM_MC_ENDIF();
7740 IEM_MC_END();
7741 }
7742}
7743
7744
7745/**
7746 * @opcode 0x87
7747 * @opfltest cf,zf
7748 */
7749FNIEMOP_DEF(iemOp_jnbe_Jv)
7750{
7751 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7752 IEMOP_HLP_MIN_386();
7753 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7754 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7755 {
7756 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7757 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7760 IEM_MC_ADVANCE_RIP_AND_FINISH();
7761 } IEM_MC_ELSE() {
7762 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7763 } IEM_MC_ENDIF();
7764 IEM_MC_END();
7765 }
7766 else
7767 {
7768 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7769 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7772 IEM_MC_ADVANCE_RIP_AND_FINISH();
7773 } IEM_MC_ELSE() {
7774 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7775 } IEM_MC_ENDIF();
7776 IEM_MC_END();
7777 }
7778}
7779
7780
7781/**
7782 * @opcode 0x88
7783 * @opfltest sf
7784 */
7785FNIEMOP_DEF(iemOp_js_Jv)
7786{
7787 IEMOP_MNEMONIC(js_Jv, "js Jv");
7788 IEMOP_HLP_MIN_386();
7789 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7790 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7791 {
7792 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7793 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7796 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7797 } IEM_MC_ELSE() {
7798 IEM_MC_ADVANCE_RIP_AND_FINISH();
7799 } IEM_MC_ENDIF();
7800 IEM_MC_END();
7801 }
7802 else
7803 {
7804 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7805 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7807 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7808 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7809 } IEM_MC_ELSE() {
7810 IEM_MC_ADVANCE_RIP_AND_FINISH();
7811 } IEM_MC_ENDIF();
7812 IEM_MC_END();
7813 }
7814}
7815
7816
7817/**
7818 * @opcode 0x89
7819 * @opfltest sf
7820 */
7821FNIEMOP_DEF(iemOp_jns_Jv)
7822{
7823 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7824 IEMOP_HLP_MIN_386();
7825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7826 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7827 {
7828 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7829 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7832 IEM_MC_ADVANCE_RIP_AND_FINISH();
7833 } IEM_MC_ELSE() {
7834 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7835 } IEM_MC_ENDIF();
7836 IEM_MC_END();
7837 }
7838 else
7839 {
7840 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7841 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7844 IEM_MC_ADVANCE_RIP_AND_FINISH();
7845 } IEM_MC_ELSE() {
7846 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7847 } IEM_MC_ENDIF();
7848 IEM_MC_END();
7849 }
7850}
7851
7852
7853/**
7854 * @opcode 0x8a
7855 * @opfltest pf
7856 */
7857FNIEMOP_DEF(iemOp_jp_Jv)
7858{
7859 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7860 IEMOP_HLP_MIN_386();
7861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7862 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7863 {
7864 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7865 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7868 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7869 } IEM_MC_ELSE() {
7870 IEM_MC_ADVANCE_RIP_AND_FINISH();
7871 } IEM_MC_ENDIF();
7872 IEM_MC_END();
7873 }
7874 else
7875 {
7876 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7877 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7880 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7881 } IEM_MC_ELSE() {
7882 IEM_MC_ADVANCE_RIP_AND_FINISH();
7883 } IEM_MC_ENDIF();
7884 IEM_MC_END();
7885 }
7886}
7887
7888
7889/**
7890 * @opcode 0x8b
7891 * @opfltest pf
7892 */
7893FNIEMOP_DEF(iemOp_jnp_Jv)
7894{
7895 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7896 IEMOP_HLP_MIN_386();
7897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7899 {
7900 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7901 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7903 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7904 IEM_MC_ADVANCE_RIP_AND_FINISH();
7905 } IEM_MC_ELSE() {
7906 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7907 } IEM_MC_ENDIF();
7908 IEM_MC_END();
7909 }
7910 else
7911 {
7912 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7913 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7916 IEM_MC_ADVANCE_RIP_AND_FINISH();
7917 } IEM_MC_ELSE() {
7918 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7919 } IEM_MC_ENDIF();
7920 IEM_MC_END();
7921 }
7922}
7923
7924
7925/**
7926 * @opcode 0x8c
7927 * @opfltest sf,of
7928 */
7929FNIEMOP_DEF(iemOp_jl_Jv)
7930{
7931 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7932 IEMOP_HLP_MIN_386();
7933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7934 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7935 {
7936 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7937 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7939 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7940 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7941 } IEM_MC_ELSE() {
7942 IEM_MC_ADVANCE_RIP_AND_FINISH();
7943 } IEM_MC_ENDIF();
7944 IEM_MC_END();
7945 }
7946 else
7947 {
7948 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7949 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7952 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7953 } IEM_MC_ELSE() {
7954 IEM_MC_ADVANCE_RIP_AND_FINISH();
7955 } IEM_MC_ENDIF();
7956 IEM_MC_END();
7957 }
7958}
7959
7960
7961/**
7962 * @opcode 0x8d
7963 * @opfltest sf,of
7964 */
7965FNIEMOP_DEF(iemOp_jnl_Jv)
7966{
7967 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7968 IEMOP_HLP_MIN_386();
7969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7970 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7971 {
7972 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7973 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7975 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7976 IEM_MC_ADVANCE_RIP_AND_FINISH();
7977 } IEM_MC_ELSE() {
7978 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7979 } IEM_MC_ENDIF();
7980 IEM_MC_END();
7981 }
7982 else
7983 {
7984 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7985 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7987 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7988 IEM_MC_ADVANCE_RIP_AND_FINISH();
7989 } IEM_MC_ELSE() {
7990 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7991 } IEM_MC_ENDIF();
7992 IEM_MC_END();
7993 }
7994}
7995
7996
7997/**
7998 * @opcode 0x8e
7999 * @opfltest zf,sf,of
8000 */
8001FNIEMOP_DEF(iemOp_jle_Jv)
8002{
8003 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8004 IEMOP_HLP_MIN_386();
8005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8007 {
8008 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8009 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8012 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8013 } IEM_MC_ELSE() {
8014 IEM_MC_ADVANCE_RIP_AND_FINISH();
8015 } IEM_MC_ENDIF();
8016 IEM_MC_END();
8017 }
8018 else
8019 {
8020 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8023 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8024 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8025 } IEM_MC_ELSE() {
8026 IEM_MC_ADVANCE_RIP_AND_FINISH();
8027 } IEM_MC_ENDIF();
8028 IEM_MC_END();
8029 }
8030}
8031
8032
8033/**
8034 * @opcode 0x8f
8035 * @opfltest zf,sf,of
8036 */
8037FNIEMOP_DEF(iemOp_jnle_Jv)
8038{
8039 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8040 IEMOP_HLP_MIN_386();
8041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8043 {
8044 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8045 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8047 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8048 IEM_MC_ADVANCE_RIP_AND_FINISH();
8049 } IEM_MC_ELSE() {
8050 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8051 } IEM_MC_ENDIF();
8052 IEM_MC_END();
8053 }
8054 else
8055 {
8056 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8060 IEM_MC_ADVANCE_RIP_AND_FINISH();
8061 } IEM_MC_ELSE() {
8062 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8063 } IEM_MC_ENDIF();
8064 IEM_MC_END();
8065 }
8066}
8067
8068
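/*
 * The SETcc instructions, opcodes 0x0f 0x90 thru 0x9f: they test the same
 * EFLAGS conditions as the Jcc family above, but store 1 or 0 to the byte
 * sized register or memory operand instead of branching.
 */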
8069/**
8070 * @opcode 0x90
8071 * @opfltest of
8072 */
8073FNIEMOP_DEF(iemOp_seto_Eb)
8074{
8075 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8076 IEMOP_HLP_MIN_386();
8077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8078
8079 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8080 * any way. AMD says it's "unused", whatever that means. We're
8081 * ignoring for now. */
8082 if (IEM_IS_MODRM_REG_MODE(bRm))
8083 {
8084 /* register target */
8085 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8087 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8088 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8089 } IEM_MC_ELSE() {
8090 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8091 } IEM_MC_ENDIF();
8092 IEM_MC_ADVANCE_RIP_AND_FINISH();
8093 IEM_MC_END();
8094 }
8095 else
8096 {
8097 /* memory target */
8098 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8102 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8103 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8104 } IEM_MC_ELSE() {
8105 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8106 } IEM_MC_ENDIF();
8107 IEM_MC_ADVANCE_RIP_AND_FINISH();
8108 IEM_MC_END();
8109 }
8110}
8111
8112
8113/**
8114 * @opcode 0x91
8115 * @opfltest of
8116 */
8117FNIEMOP_DEF(iemOp_setno_Eb)
8118{
8119 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8120 IEMOP_HLP_MIN_386();
8121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8122
8123 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8124 * any way. AMD says it's "unused", whatever that means. We're
8125 * ignoring for now. */
8126 if (IEM_IS_MODRM_REG_MODE(bRm))
8127 {
8128 /* register target */
8129 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8131 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8132 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8133 } IEM_MC_ELSE() {
8134 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8135 } IEM_MC_ENDIF();
8136 IEM_MC_ADVANCE_RIP_AND_FINISH();
8137 IEM_MC_END();
8138 }
8139 else
8140 {
8141 /* memory target */
8142 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8146 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8148 } IEM_MC_ELSE() {
8149 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8150 } IEM_MC_ENDIF();
8151 IEM_MC_ADVANCE_RIP_AND_FINISH();
8152 IEM_MC_END();
8153 }
8154}
8155
8156
8157/**
8158 * @opcode 0x92
8159 * @opfltest cf
8160 */
8161FNIEMOP_DEF(iemOp_setc_Eb)
8162{
8163 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8164 IEMOP_HLP_MIN_386();
8165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8166
8167 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8168 * any way. AMD says it's "unused", whatever that means. We're
8169 * ignoring for now. */
8170 if (IEM_IS_MODRM_REG_MODE(bRm))
8171 {
8172 /* register target */
8173 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8175 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8176 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8177 } IEM_MC_ELSE() {
8178 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8179 } IEM_MC_ENDIF();
8180 IEM_MC_ADVANCE_RIP_AND_FINISH();
8181 IEM_MC_END();
8182 }
8183 else
8184 {
8185 /* memory target */
8186 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8190 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8192 } IEM_MC_ELSE() {
8193 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8194 } IEM_MC_ENDIF();
8195 IEM_MC_ADVANCE_RIP_AND_FINISH();
8196 IEM_MC_END();
8197 }
8198}
8199
8200
8201/**
8202 * @opcode 0x93
8203 * @opfltest cf
8204 */
8205FNIEMOP_DEF(iemOp_setnc_Eb)
8206{
8207 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8208 IEMOP_HLP_MIN_386();
8209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8210
8211 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8212 * any way. AMD says it's "unused", whatever that means. We're
8213 * ignoring for now. */
8214 if (IEM_IS_MODRM_REG_MODE(bRm))
8215 {
8216 /* register target */
8217 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8219 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8220 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8221 } IEM_MC_ELSE() {
8222 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8223 } IEM_MC_ENDIF();
8224 IEM_MC_ADVANCE_RIP_AND_FINISH();
8225 IEM_MC_END();
8226 }
8227 else
8228 {
8229 /* memory target */
8230 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8234 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8235 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8236 } IEM_MC_ELSE() {
8237 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8238 } IEM_MC_ENDIF();
8239 IEM_MC_ADVANCE_RIP_AND_FINISH();
8240 IEM_MC_END();
8241 }
8242}
8243
8244
8245/**
8246 * @opcode 0x94
8247 * @opfltest zf
8248 */
8249FNIEMOP_DEF(iemOp_sete_Eb)
8250{
8251 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8252 IEMOP_HLP_MIN_386();
8253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8254
8255 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8256 * any way. AMD says it's "unused", whatever that means. We're
8257 * ignoring for now. */
8258 if (IEM_IS_MODRM_REG_MODE(bRm))
8259 {
8260 /* register target */
8261 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8263 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8264 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8265 } IEM_MC_ELSE() {
8266 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8267 } IEM_MC_ENDIF();
8268 IEM_MC_ADVANCE_RIP_AND_FINISH();
8269 IEM_MC_END();
8270 }
8271 else
8272 {
8273 /* memory target */
8274 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8278 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8279 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8280 } IEM_MC_ELSE() {
8281 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8282 } IEM_MC_ENDIF();
8283 IEM_MC_ADVANCE_RIP_AND_FINISH();
8284 IEM_MC_END();
8285 }
8286}
8287
8288
8289/**
8290 * @opcode 0x95
8291 * @opfltest zf
8292 */
8293FNIEMOP_DEF(iemOp_setne_Eb)
8294{
8295 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8296 IEMOP_HLP_MIN_386();
8297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8298
8299 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8300 * any way. AMD says it's "unused", whatever that means. We're
8301 * ignoring for now. */
8302 if (IEM_IS_MODRM_REG_MODE(bRm))
8303 {
8304 /* register target */
8305 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8308 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8309 } IEM_MC_ELSE() {
8310 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8311 } IEM_MC_ENDIF();
8312 IEM_MC_ADVANCE_RIP_AND_FINISH();
8313 IEM_MC_END();
8314 }
8315 else
8316 {
8317 /* memory target */
8318 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8323 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8324 } IEM_MC_ELSE() {
8325 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8326 } IEM_MC_ENDIF();
8327 IEM_MC_ADVANCE_RIP_AND_FINISH();
8328 IEM_MC_END();
8329 }
8330}
8331
8332
8333/**
8334 * @opcode 0x96
8335 * @opfltest cf,zf
8336 */
8337FNIEMOP_DEF(iemOp_setbe_Eb)
8338{
8339 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8340 IEMOP_HLP_MIN_386();
8341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8342
8343 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8344 * any way. AMD says it's "unused", whatever that means. We're
8345 * ignoring for now. */
8346 if (IEM_IS_MODRM_REG_MODE(bRm))
8347 {
8348 /* register target */
8349 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8351 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8352 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8353 } IEM_MC_ELSE() {
8354 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8355 } IEM_MC_ENDIF();
8356 IEM_MC_ADVANCE_RIP_AND_FINISH();
8357 IEM_MC_END();
8358 }
8359 else
8360 {
8361 /* memory target */
8362 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8366 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8367 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8368 } IEM_MC_ELSE() {
8369 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8370 } IEM_MC_ENDIF();
8371 IEM_MC_ADVANCE_RIP_AND_FINISH();
8372 IEM_MC_END();
8373 }
8374}
8375
8376
8377/**
8378 * @opcode 0x97
8379 * @opfltest cf,zf
8380 */
8381FNIEMOP_DEF(iemOp_setnbe_Eb)
8382{
8383 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8384 IEMOP_HLP_MIN_386();
8385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8386
8387 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8388 * any way. AMD says it's "unused", whatever that means. We're
8389 * ignoring for now. */
8390 if (IEM_IS_MODRM_REG_MODE(bRm))
8391 {
8392 /* register target */
8393 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8395 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8396 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8397 } IEM_MC_ELSE() {
8398 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8399 } IEM_MC_ENDIF();
8400 IEM_MC_ADVANCE_RIP_AND_FINISH();
8401 IEM_MC_END();
8402 }
8403 else
8404 {
8405 /* memory target */
8406 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8412 } IEM_MC_ELSE() {
8413 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8414 } IEM_MC_ENDIF();
8415 IEM_MC_ADVANCE_RIP_AND_FINISH();
8416 IEM_MC_END();
8417 }
8418}
8419
8420
8421/**
8422 * @opcode 0x98
8423 * @opfltest sf
8424 */
8425FNIEMOP_DEF(iemOp_sets_Eb)
8426{
8427 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8428 IEMOP_HLP_MIN_386();
8429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8430
8431 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8432 * any way. AMD says it's "unused", whatever that means. We're
8433 * ignoring for now. */
8434 if (IEM_IS_MODRM_REG_MODE(bRm))
8435 {
8436 /* register target */
8437 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8440 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8441 } IEM_MC_ELSE() {
8442 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8443 } IEM_MC_ENDIF();
8444 IEM_MC_ADVANCE_RIP_AND_FINISH();
8445 IEM_MC_END();
8446 }
8447 else
8448 {
8449 /* memory target */
8450 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8454 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8455 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8456 } IEM_MC_ELSE() {
8457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8458 } IEM_MC_ENDIF();
8459 IEM_MC_ADVANCE_RIP_AND_FINISH();
8460 IEM_MC_END();
8461 }
8462}
8463
8464
8465/**
8466 * @opcode 0x99
8467 * @opfltest sf
8468 */
8469FNIEMOP_DEF(iemOp_setns_Eb)
8470{
8471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8472 IEMOP_HLP_MIN_386();
8473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8474
8475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8476 * any way. AMD says it's "unused", whatever that means. We're
8477 * ignoring for now. */
8478 if (IEM_IS_MODRM_REG_MODE(bRm))
8479 {
8480 /* register target */
8481 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8484 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8485 } IEM_MC_ELSE() {
8486 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8487 } IEM_MC_ENDIF();
8488 IEM_MC_ADVANCE_RIP_AND_FINISH();
8489 IEM_MC_END();
8490 }
8491 else
8492 {
8493 /* memory target */
8494 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8500 } IEM_MC_ELSE() {
8501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8502 } IEM_MC_ENDIF();
8503 IEM_MC_ADVANCE_RIP_AND_FINISH();
8504 IEM_MC_END();
8505 }
8506}
8507
8508
8509/**
8510 * @opcode 0x9a
8511 * @opfltest pf
8512 */
8513FNIEMOP_DEF(iemOp_setp_Eb)
8514{
8515 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8516 IEMOP_HLP_MIN_386();
8517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8518
8519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8520 * any way. AMD says it's "unused", whatever that means. We're
8521 * ignoring for now. */
8522 if (IEM_IS_MODRM_REG_MODE(bRm))
8523 {
8524 /* register target */
8525 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP_AND_FINISH();
8533 IEM_MC_END();
8534 }
8535 else
8536 {
8537 /* memory target */
8538 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8544 } IEM_MC_ELSE() {
8545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8546 } IEM_MC_ENDIF();
8547 IEM_MC_ADVANCE_RIP_AND_FINISH();
8548 IEM_MC_END();
8549 }
8550}
8551
8552
8553/**
8554 * @opcode 0x9b
8555 * @opfltest pf
8556 */
8557FNIEMOP_DEF(iemOp_setnp_Eb)
8558{
8559 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8560 IEMOP_HLP_MIN_386();
8561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8562
8563 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8564 * any way. AMD says it's "unused", whatever that means. We're
8565 * ignoring for now. */
8566 if (IEM_IS_MODRM_REG_MODE(bRm))
8567 {
8568 /* register target */
8569 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8571 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8572 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8573 } IEM_MC_ELSE() {
8574 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8575 } IEM_MC_ENDIF();
8576 IEM_MC_ADVANCE_RIP_AND_FINISH();
8577 IEM_MC_END();
8578 }
8579 else
8580 {
8581 /* memory target */
8582 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8587 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8588 } IEM_MC_ELSE() {
8589 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8590 } IEM_MC_ENDIF();
8591 IEM_MC_ADVANCE_RIP_AND_FINISH();
8592 IEM_MC_END();
8593 }
8594}
8595
8596
8597/**
8598 * @opcode 0x9c
8599 * @opfltest sf,of
8600 */
8601FNIEMOP_DEF(iemOp_setl_Eb)
8602{
8603 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8604 IEMOP_HLP_MIN_386();
8605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8606
8607 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8608 * any way. AMD says it's "unused", whatever that means. We're
8609 * ignoring for now. */
8610 if (IEM_IS_MODRM_REG_MODE(bRm))
8611 {
8612 /* register target */
8613 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8615 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8616 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8617 } IEM_MC_ELSE() {
8618 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8619 } IEM_MC_ENDIF();
8620 IEM_MC_ADVANCE_RIP_AND_FINISH();
8621 IEM_MC_END();
8622 }
8623 else
8624 {
8625 /* memory target */
8626 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8630 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8631 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8632 } IEM_MC_ELSE() {
8633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8634 } IEM_MC_ENDIF();
8635 IEM_MC_ADVANCE_RIP_AND_FINISH();
8636 IEM_MC_END();
8637 }
8638}
8639
8640
8641/**
8642 * @opcode 0x9d
8643 * @opfltest sf,of
8644 */
8645FNIEMOP_DEF(iemOp_setnl_Eb)
8646{
8647 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8648 IEMOP_HLP_MIN_386();
8649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8650
8651 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8652 * any way. AMD says it's "unused", whatever that means. We're
8653 * ignoring for now. */
8654 if (IEM_IS_MODRM_REG_MODE(bRm))
8655 {
8656 /* register target */
8657 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8659 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8660 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8661 } IEM_MC_ELSE() {
8662 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8663 } IEM_MC_ENDIF();
8664 IEM_MC_ADVANCE_RIP_AND_FINISH();
8665 IEM_MC_END();
8666 }
8667 else
8668 {
8669 /* memory target */
8670 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8674 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8675 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8676 } IEM_MC_ELSE() {
8677 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8678 } IEM_MC_ENDIF();
8679 IEM_MC_ADVANCE_RIP_AND_FINISH();
8680 IEM_MC_END();
8681 }
8682}
8683
8684
8685/**
8686 * @opcode 0x9e
8687 * @opfltest zf,sf,of
8688 */
8689FNIEMOP_DEF(iemOp_setle_Eb)
8690{
8691 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8692 IEMOP_HLP_MIN_386();
8693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8694
8695 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8696 * any way. AMD says it's "unused", whatever that means. We're
8697 * ignoring for now. */
8698 if (IEM_IS_MODRM_REG_MODE(bRm))
8699 {
8700 /* register target */
8701 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8703 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8704 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8705 } IEM_MC_ELSE() {
8706 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8707 } IEM_MC_ENDIF();
8708 IEM_MC_ADVANCE_RIP_AND_FINISH();
8709 IEM_MC_END();
8710 }
8711 else
8712 {
8713 /* memory target */
8714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8718 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8719 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8720 } IEM_MC_ELSE() {
8721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8722 } IEM_MC_ENDIF();
8723 IEM_MC_ADVANCE_RIP_AND_FINISH();
8724 IEM_MC_END();
8725 }
8726}
8727
8728
8729/**
8730 * @opcode 0x9f
8731 * @opfltest zf,sf,of
8732 */
8733FNIEMOP_DEF(iemOp_setnle_Eb)
8734{
8735 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8736 IEMOP_HLP_MIN_386();
8737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8738
8739 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8740 * any way. AMD says it's "unused", whatever that means. We're
8741 * ignoring for now. */
8742 if (IEM_IS_MODRM_REG_MODE(bRm))
8743 {
8744 /* register target */
8745 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8747 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8748 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8749 } IEM_MC_ELSE() {
8750 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8751 } IEM_MC_ENDIF();
8752 IEM_MC_ADVANCE_RIP_AND_FINISH();
8753 IEM_MC_END();
8754 }
8755 else
8756 {
8757 /* memory target */
8758 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8762 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8763 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8764 } IEM_MC_ELSE() {
8765 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8766 } IEM_MC_ENDIF();
8767 IEM_MC_ADVANCE_RIP_AND_FINISH();
8768 IEM_MC_END();
8769 }
8770}
8771
8772
8773/** Opcode 0x0f 0xa0. */
8774FNIEMOP_DEF(iemOp_push_fs)
8775{
8776 IEMOP_MNEMONIC(push_fs, "push fs");
8777 IEMOP_HLP_MIN_386();
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8780}
8781
8782
8783/** Opcode 0x0f 0xa1. */
8784FNIEMOP_DEF(iemOp_pop_fs)
8785{
8786 IEMOP_MNEMONIC(pop_fs, "pop fs");
8787 IEMOP_HLP_MIN_386();
8788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8789 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8790 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8791 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8792 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8793 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8794 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8795 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8796 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8797}
8798
8799
8800/** Opcode 0x0f 0xa2. */
8801FNIEMOP_DEF(iemOp_cpuid)
8802{
8803 IEMOP_MNEMONIC(cpuid, "cpuid");
8804 IEMOP_HLP_MIN_486(); /* not all 486es. */
8805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8806 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8807 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8809 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8810 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8811 iemCImpl_cpuid);
8812}
8813
8814
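/*
 * For orientation, a minimal C sketch (ours, illustration only - nothing
 * below uses it) of the addressing rule the memory forms of the bit
 * instructions implement for 16-bit operands: the register operand is a
 * signed bit offset, so the word holding the bit lives at
 * EA + (offset >> 4) * 2 and the bit within that word is offset & 15.
 */
#if 0 /* hypothetical example, never compiled */
# include <stdint.h>
# include <stdbool.h>
# include <string.h>
static bool iemExampleBtw(uint8_t const *pbBase, int16_t i16BitOfs)
{
    uint16_t u16Word;
    memcpy(&u16Word, pbBase + (intptr_t)(i16BitOfs >> 4) * 2, sizeof(u16Word));
    return (u16Word >> ((unsigned)i16BitOfs & 15)) & 1; /* what ends up in CF */
}
#endif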
8815/**
8816 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8817 * iemOp_bts_Ev_Gv.
8818 */
8819
8820#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8822 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8823 \
8824 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8825 { \
8826 /* register destination. */ \
8827 switch (pVCpu->iem.s.enmEffOpSize) \
8828 { \
8829 case IEMMODE_16BIT: \
8830 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8832 \
8833 IEM_MC_ARG(uint16_t, u16Src, 2); \
8834 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8835 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8836 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8838 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8839 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8840 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8841 \
8842 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8843 IEM_MC_END(); \
8844 break; \
8845 \
8846 case IEMMODE_32BIT: \
8847 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8849 \
8850 IEM_MC_ARG(uint32_t, u32Src, 2); \
8851 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8852 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8853 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8854 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8855 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8856 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8857 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8858 \
8859 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8860 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8861 IEM_MC_END(); \
8862 break; \
8863 \
8864 case IEMMODE_64BIT: \
8865 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8867 \
8868 IEM_MC_ARG(uint64_t, u64Src, 2); \
8869 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8870 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8871 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8872 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8873 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8874 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8875 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8876 \
8877 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8878 IEM_MC_END(); \
8879 break; \
8880 \
8881 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8882 } \
8883 } \
8884 else \
8885 { \
8886 /* memory destination. */ \
8887 /** @todo test negative bit offsets! */ \
8888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8889 { \
8890 switch (pVCpu->iem.s.enmEffOpSize) \
8891 { \
8892 case IEMMODE_16BIT: \
8893 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8896 IEMOP_HLP_DONE_DECODING(); \
8897 \
8898 IEM_MC_ARG(uint16_t, u16Src, 2); \
8899 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8900 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8901 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8902 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8903 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8904 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8905 \
8906 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8907 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8908 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8909 \
8910 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8911 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8912 \
8913 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8914 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8916 IEM_MC_END(); \
8917 break; \
8918 \
8919 case IEMMODE_32BIT: \
8920 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8923 IEMOP_HLP_DONE_DECODING(); \
8924 \
8925 IEM_MC_ARG(uint32_t, u32Src, 2); \
8926 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8927 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8928 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8929 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8930 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8931 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8932 \
8933 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8934 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8935 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8936 \
8937 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8938 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8939 \
8940 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8941 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8942 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8943 IEM_MC_END(); \
8944 break; \
8945 \
8946 case IEMMODE_64BIT: \
8947 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8950 IEMOP_HLP_DONE_DECODING(); \
8951 \
8952 IEM_MC_ARG(uint64_t, u64Src, 2); \
8953 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8954 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
8955 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
8956 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
8957 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
8958 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
8959 \
8960 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8961 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8962 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8963 \
8964 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8965 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8966 \
8967 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8968 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8969 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8970 IEM_MC_END(); \
8971 break; \
8972 \
8973 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8974 } \
8975 } \
8976 else \
8977 { \
8978 (void)0
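/*
 * Note the deliberately unbalanced braces: IEMOP_BODY_BIT_Ev_Gv_RW above
 * stops inside its final 'else' block, and IEMOP_BODY_BIT_Ev_Gv_LOCKED
 * below supplies the locked body and the closing braces.  The two are
 * therefore always used back to back, e.g. (cf. iemOp_bts_Ev_Gv below):
 *
 *     IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64);
 *     IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
 */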
8979/* Separate macro to work around parsing issue in IEMAllInstPython.py */
8980#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
8981 switch (pVCpu->iem.s.enmEffOpSize) \
8982 { \
8983 case IEMMODE_16BIT: \
8984 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8987 IEMOP_HLP_DONE_DECODING(); \
8988 \
8989 IEM_MC_ARG(uint16_t, u16Src, 2); \
8990 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8991 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8992 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8993 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8994 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8995 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8996 \
8997 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8998 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8999 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9000 \
9001 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9002 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9003 \
9004 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9005 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9006 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9007 IEM_MC_END(); \
9008 break; \
9009 \
9010 case IEMMODE_32BIT: \
9011 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9014 IEMOP_HLP_DONE_DECODING(); \
9015 \
9016 IEM_MC_ARG(uint32_t, u32Src, 2); \
9017 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9018 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9019 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9020 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9021 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9022 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9023 \
9024 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9025 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9026 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9027 \
9028 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9029 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9030 \
9031 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9032 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9033 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9034 IEM_MC_END(); \
9035 break; \
9036 \
9037 case IEMMODE_64BIT: \
9038 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9041 IEMOP_HLP_DONE_DECODING(); \
9042 \
9043 IEM_MC_ARG(uint64_t, u64Src, 2); \
9044 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9045 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9046 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9047 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9048 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9049 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9050 \
9051 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9052 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9053 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9054 \
9055 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9056 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9057 \
9058 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9059 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9060 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9061 IEM_MC_END(); \
9062 break; \
9063 \
9064 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9065 } \
9066 } \
9067 } \
9068 (void)0
9069
9070/* Read-only version (bt). */
9071#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9073 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9074 \
9075 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9076 { \
9077 /* register destination. */ \
9078 switch (pVCpu->iem.s.enmEffOpSize) \
9079 { \
9080 case IEMMODE_16BIT: \
9081 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9083 \
9084 IEM_MC_ARG(uint16_t, u16Src, 2); \
9085 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9086 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9087 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9088 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9089 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9090 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9091 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9092 \
9093 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9094 IEM_MC_END(); \
9095 break; \
9096 \
9097 case IEMMODE_32BIT: \
9098 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9100 \
9101 IEM_MC_ARG(uint32_t, u32Src, 2); \
9102 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9103 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9104 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9105 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9106 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9107 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9108 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9109 \
9110 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9111 IEM_MC_END(); \
9112 break; \
9113 \
9114 case IEMMODE_64BIT: \
9115 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9117 \
9118 IEM_MC_ARG(uint64_t, u64Src, 2); \
9119 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9120 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9121 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9122 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9123 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9124 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9125 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9126 \
9127 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9128 IEM_MC_END(); \
9129 break; \
9130 \
9131 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9132 } \
9133 } \
9134 else \
9135 { \
9136 /* memory destination. */ \
9137 /** @todo test negative bit offsets! */ \
9138 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9139 { \
9140 switch (pVCpu->iem.s.enmEffOpSize) \
9141 { \
9142 case IEMMODE_16BIT: \
9143 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9146 IEMOP_HLP_DONE_DECODING(); \
9147 \
9148 IEM_MC_ARG(uint16_t, u16Src, 2); \
9149 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9150 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9151 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9152 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9153 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9154 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9155 \
9156 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9157 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9158 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9159 \
9160 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9161 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9162 \
9163 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9164 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9165 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9166 IEM_MC_END(); \
9167 break; \
9168 \
9169 case IEMMODE_32BIT: \
9170 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9173 IEMOP_HLP_DONE_DECODING(); \
9174 \
9175 IEM_MC_ARG(uint32_t, u32Src, 2); \
9176 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9177 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9178 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9179 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9180 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9181 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9182 \
9183 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9184 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9185 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9186 \
9187 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9188 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9189 \
9190 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9191 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9193 IEM_MC_END(); \
9194 break; \
9195 \
9196 case IEMMODE_64BIT: \
9197 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9200 IEMOP_HLP_DONE_DECODING(); \
9201 \
9202 IEM_MC_ARG(uint64_t, u64Src, 2); \
9203 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9204 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9205 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9206 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9207 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9208 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9209 \
9210 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9211 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9212 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9213 \
9214 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9215 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9216 \
9217 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9218 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9219 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9220 IEM_MC_END(); \
9221 break; \
9222 \
9223 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9224 } \
9225 } \
9226 else \
9227 { \
9228 IEMOP_HLP_DONE_DECODING(); \
9229 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9230 } \
9231 } \
9232 (void)0
9233
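/*
 * Illustration only: in the memory forms above the source register holds a
 * *signed* bit offset relative to the memory operand, so the SAR/SHL pair
 * first converts it into an operand-aligned byte displacement, leaving the
 * low bits as the bit index within the accessed unit.  A standalone sketch
 * of that arithmetic for the 16-bit case (hypothetical helper name):
 *
 *     #include <stdint.h>
 *
 *     static void CalcBitAddrU16(uint64_t GCPtrEff, int16_t i16BitOfs,
 *                                uint64_t *pGCPtrWord, unsigned *piBitNo)
 *     {
 *         *piBitNo    = (uint16_t)i16BitOfs & 0x0f;                 // IEM_MC_AND_ARG_U16
 *         *pGCPtrWord = GCPtrEff + (int64_t)((i16BitOfs >> 4) * 2); // SAR 4; SHL 1; ADD
 *     }
 *
 * E.g. a bit offset of -1 gives *piBitNo = 15 and a displacement of -2 bytes,
 * which is what the "test negative bit offsets" todo above is about.
 */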
9234
9235/**
9236 * @opcode 0xa3
9237 * @oppfx n/a
9238 * @opflclass bitmap
9239 */
9240FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9241{
9242 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9243 IEMOP_HLP_MIN_386();
9244 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9245}
9246
9247
9248/**
9249 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib (see the behavioural sketch after this macro).
9250 */
9251#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
9252 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9253 \
9254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9255 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9256 \
9257 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9258 { \
9259 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9260 \
9261 switch (pVCpu->iem.s.enmEffOpSize) \
9262 { \
9263 case IEMMODE_16BIT: \
9264 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9266 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9267 IEM_MC_ARG(uint16_t, u16Src, 1); \
9268 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9269 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9270 \
9271 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9272 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9273 IEM_MC_REF_EFLAGS(pEFlags); \
9274 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9275 \
9276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9277 IEM_MC_END(); \
9278 break; \
9279 \
9280 case IEMMODE_32BIT: \
9281 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9283 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9284 IEM_MC_ARG(uint32_t, u32Src, 1); \
9285 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9286 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9287 \
9288 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9289 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9290 IEM_MC_REF_EFLAGS(pEFlags); \
9291 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9292 \
9293 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9294 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9295 IEM_MC_END(); \
9296 break; \
9297 \
9298 case IEMMODE_64BIT: \
9299 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9301 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9302 IEM_MC_ARG(uint64_t, u64Src, 1); \
9303 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9304 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9305 \
9306 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9307 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9308 IEM_MC_REF_EFLAGS(pEFlags); \
9309 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9310 \
9311 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9312 IEM_MC_END(); \
9313 break; \
9314 \
9315 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9316 } \
9317 } \
9318 else \
9319 { \
9320 switch (pVCpu->iem.s.enmEffOpSize) \
9321 { \
9322 case IEMMODE_16BIT: \
9323 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9326 \
9327 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9329 \
9330 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9331 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9332 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9333 \
9334 IEM_MC_ARG(uint16_t, u16Src, 1); \
9335 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9336 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9337 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9338 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9339 \
9340 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9341 IEM_MC_COMMIT_EFLAGS(EFlags); \
9342 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9343 IEM_MC_END(); \
9344 break; \
9345 \
9346 case IEMMODE_32BIT: \
9347 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9350 \
9351 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9353 \
9354 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9355 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9356 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9357 \
9358 IEM_MC_ARG(uint32_t, u32Src, 1); \
9359 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9360 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9361 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9362 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9363 \
9364 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9365 IEM_MC_COMMIT_EFLAGS(EFlags); \
9366 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9367 IEM_MC_END(); \
9368 break; \
9369 \
9370 case IEMMODE_64BIT: \
9371 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9374 \
9375 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9377 \
9378 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9379 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9380 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9381 \
9382 IEM_MC_ARG(uint64_t, u64Src, 1); \
9383 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9384 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9385 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9386 \
9387 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9388 \
9389 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9390 IEM_MC_COMMIT_EFLAGS(EFlags); \
9391 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9392 IEM_MC_END(); \
9393 break; \
9394 \
9395 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9396 } \
9397 } (void)0
9398
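/*
 * Illustration only: the double precision shift left fills the vacated low
 * bits of the destination from the top of the source register.  A
 * behavioural sketch of the 32-bit case, assuming the count has already
 * been masked to 1..31 (the flag updates live in the iemAImpl_shld_*
 * helpers and are omitted here):
 *
 *     #include <stdint.h>
 *
 *     static uint32_t ShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *     {
 *         return (uDst << cShift) | (uSrc >> (32 - cShift));
 *     }
 */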
9399
9400/**
9401 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9402 */
9403#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9404 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9405 \
9406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9407 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9408 \
9409 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9410 { \
9411 switch (pVCpu->iem.s.enmEffOpSize) \
9412 { \
9413 case IEMMODE_16BIT: \
9414 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9416 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9417 IEM_MC_ARG(uint16_t, u16Src, 1); \
9418 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9419 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9420 \
9421 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9422 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9423 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9424 IEM_MC_REF_EFLAGS(pEFlags); \
9425 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9426 \
9427 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9428 IEM_MC_END(); \
9429 break; \
9430 \
9431 case IEMMODE_32BIT: \
9432 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9434 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9435 IEM_MC_ARG(uint32_t, u32Src, 1); \
9436 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9437 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9438 \
9439 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9440 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9441 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9442 IEM_MC_REF_EFLAGS(pEFlags); \
9443 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9444 \
9445 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9446 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9447 IEM_MC_END(); \
9448 break; \
9449 \
9450 case IEMMODE_64BIT: \
9451 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9454 IEM_MC_ARG(uint64_t, u64Src, 1); \
9455 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9456 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9457 \
9458 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9459 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9460 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9461 IEM_MC_REF_EFLAGS(pEFlags); \
9462 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9463 \
9464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9465 IEM_MC_END(); \
9466 break; \
9467 \
9468 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9469 } \
9470 } \
9471 else \
9472 { \
9473 switch (pVCpu->iem.s.enmEffOpSize) \
9474 { \
9475 case IEMMODE_16BIT: \
9476 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9477 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9478 IEM_MC_ARG(uint16_t, u16Src, 1); \
9479 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9481 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9482 \
9483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9485 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9486 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9487 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9488 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9489 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9490 \
9491 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9492 IEM_MC_COMMIT_EFLAGS(EFlags); \
9493 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9494 IEM_MC_END(); \
9495 break; \
9496 \
9497 case IEMMODE_32BIT: \
9498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9499 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9500 IEM_MC_ARG(uint32_t, u32Src, 1); \
9501 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9503 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9504 \
9505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9507 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9508 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9509 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9510 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9512 \
9513 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9514 IEM_MC_COMMIT_EFLAGS(EFlags); \
9515 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9516 IEM_MC_END(); \
9517 break; \
9518 \
9519 case IEMMODE_64BIT: \
9520 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9521 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9522 IEM_MC_ARG(uint64_t, u64Src, 1); \
9523 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9525 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9526 \
9527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9529 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9530 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9531 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9532 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9533 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9534 \
9535 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9536 IEM_MC_COMMIT_EFLAGS(EFlags); \
9537 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9538 IEM_MC_END(); \
9539 break; \
9540 \
9541 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9542 } \
9543 } (void)0
9544
9545
9546/**
9547 * @opcode 0xa4
9548 * @opflclass shift_count
9549 */
9550FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9551{
9552 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9553 IEMOP_HLP_MIN_386();
9554 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9555}
9556
9557
9558/**
9559 * @opcode 0xa5
9560 * @opflclass shift_count
9561 */
9562FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9563{
9564 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9565 IEMOP_HLP_MIN_386();
9566 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9567}
9568
9569
9570/** Opcode 0x0f 0xa8. */
9571FNIEMOP_DEF(iemOp_push_gs)
9572{
9573 IEMOP_MNEMONIC(push_gs, "push gs");
9574 IEMOP_HLP_MIN_386();
9575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9576 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9577}
9578
9579
9580/** Opcode 0x0f 0xa9. */
9581FNIEMOP_DEF(iemOp_pop_gs)
9582{
9583 IEMOP_MNEMONIC(pop_gs, "pop gs");
9584 IEMOP_HLP_MIN_386();
9585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9586 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9587 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9588 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9589 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9590 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9591 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9592 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9593 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9594}
9595
9596
9597/** Opcode 0x0f 0xaa. */
9598FNIEMOP_DEF(iemOp_rsm)
9599{
9600 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9601 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9603 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9604 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9605 iemCImpl_rsm);
9606}
9607
9608
9609
9610/**
9611 * @opcode 0xab
9612 * @oppfx n/a
9613 * @opflclass bitmap
9614 */
9615FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9616{
9617 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9618 IEMOP_HLP_MIN_386();
9619 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9620 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9621}
9622
9623
9624/**
9625 * @opcode 0xac
9626 * @opflclass shift_count
9627 */
9628FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9629{
9630 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9631 IEMOP_HLP_MIN_386();
9632 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9633}
9634
9635
9636/**
9637 * @opcode 0xad
9638 * @opflclass shift_count
9639 */
9640FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9641{
9642 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9643 IEMOP_HLP_MIN_386();
9644 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9645}
9646
9647
9648/** Opcode 0x0f 0xae mem/0. */
9649FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9650{
9651 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9652 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9653 IEMOP_RAISE_INVALID_OPCODE_RET();
9654
9655 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9656 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9659 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9660 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9661 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9662 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9663 IEM_MC_END();
9664}
9665
9666
9667/** Opcode 0x0f 0xae mem/1. */
9668FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9669{
9670 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9671 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9672 IEMOP_RAISE_INVALID_OPCODE_RET();
9673
9674 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9675 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9678 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9679 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9680 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9681 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9682 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9683 IEM_MC_END();
9684}
9685
9686
9687/**
9688 * @opmaps grp15
9689 * @opcode !11/2
9690 * @oppfx none
9691 * @opcpuid sse
9692 * @opgroup og_sse_mxcsrsm
9693 * @opxcpttype 5
9694 * @optest op1=0 -> mxcsr=0
9695 * @optest op1=0x2083 -> mxcsr=0x2083
9696 * @optest op1=0xfffffffe -> value.xcpt=0xd
9697 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9698 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9699 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9700 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9701 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9702 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9703 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9704 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9705 */
9706FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9707{
9708 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9709 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9710 IEMOP_RAISE_INVALID_OPCODE_RET();
9711
9712 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9713 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9716 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9717 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9718 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9719 IEM_MC_END();
9720}
9721
9722
9723/**
9724 * @opmaps grp15
9725 * @opcode !11/3
9726 * @oppfx none
9727 * @opcpuid sse
9728 * @opgroup og_sse_mxcsrsm
9729 * @opxcpttype 5
9730 * @optest mxcsr=0 -> op1=0
9731 * @optest mxcsr=0x2083 -> op1=0x2083
9732 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9733 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9734 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9735 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9736 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9737 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9738 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9739 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9740 */
9741FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9742{
9743 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9744 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9745 IEMOP_RAISE_INVALID_OPCODE_RET();
9746
9747 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9748 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9751 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9752 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9753 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9754 IEM_MC_END();
9755}
9756
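/*
 * For reference: host code accesses the register these two instructions
 * emulate via the SSE intrinsics.  A minimal sketch (not used here):
 *
 *     #include <xmmintrin.h>
 *
 *     static void EnableFlushToZero(void)
 *     {
 *         unsigned int fMxCsr = _mm_getcsr();          // what STMXCSR stores
 *         _mm_setcsr(fMxCsr | _MM_FLUSH_ZERO_ON);      // what LDMXCSR loads
 *     }
 */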
9757
9758/**
9759 * @opmaps grp15
9760 * @opcode !11/4
9761 * @oppfx none
9762 * @opcpuid xsave
9763 * @opgroup og_system
9764 * @opxcpttype none
9765 */
9766FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9767{
9768 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9769 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9770 IEMOP_RAISE_INVALID_OPCODE_RET();
9771
9772 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9773 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9777 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9778 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9779 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9780 IEM_MC_END();
9781}
9782
9783
9784/**
9785 * @opmaps grp15
9786 * @opcode !11/5
9787 * @oppfx none
9788 * @opcpuid xsave
9789 * @opgroup og_system
9790 * @opxcpttype none
9791 */
9792FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9793{
9794 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9795 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9796 IEMOP_RAISE_INVALID_OPCODE_RET();
9797
9798 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9799 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9802 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9803 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9804 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9805 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9806 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9807 IEM_MC_END();
9808}
9809
9810/** Opcode 0x0f 0xae mem/6. */
9811FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9812
9813/**
9814 * @opmaps grp15
9815 * @opcode !11/7
9816 * @oppfx none
9817 * @opcpuid clfsh
9818 * @opgroup og_cachectl
9819 * @optest op1=1 ->
9820 */
9821FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9822{
9823 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9824 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9825 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9826
9827 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9828 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9831 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9832 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9833 IEM_MC_END();
9834}
9835
9836/**
9837 * @opmaps grp15
9838 * @opcode !11/7
9839 * @oppfx 0x66
9840 * @opcpuid clflushopt
9841 * @opgroup og_cachectl
9842 * @optest op1=1 ->
9843 */
9844FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9845{
9846 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9847 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9848 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9849
9850 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9851 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9854 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9855 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9856 IEM_MC_END();
9857}
9858
9859
9860/** Opcode 0x0f 0xae 11b/5. */
9861FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9862{
9863 RT_NOREF_PV(bRm);
9864 IEMOP_MNEMONIC(lfence, "lfence");
9865 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9867#ifdef RT_ARCH_ARM64
9868 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9869#else
9870 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9871 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9872 else
9873 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9874#endif
9875 IEM_MC_ADVANCE_RIP_AND_FINISH();
9876 IEM_MC_END();
9877}
9878
9879
9880/** Opcode 0x0f 0xae 11b/6. */
9881FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9882{
9883 RT_NOREF_PV(bRm);
9884 IEMOP_MNEMONIC(mfence, "mfence");
9885 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9887#ifdef RT_ARCH_ARM64
9888 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9889#else
9890 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9891 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9892 else
9893 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9894#endif
9895 IEM_MC_ADVANCE_RIP_AND_FINISH();
9896 IEM_MC_END();
9897}
9898
9899
9900/** Opcode 0x0f 0xae 11b/7. */
9901FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9902{
9903 RT_NOREF_PV(bRm);
9904 IEMOP_MNEMONIC(sfence, "sfence");
9905 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9907#ifdef RT_ARCH_ARM64
9908 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9909#else
9910 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9911 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9912 else
9913 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9914#endif
9915 IEM_MC_ADVANCE_RIP_AND_FINISH();
9916 IEM_MC_END();
9917}
9918
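/*
 * The three fences above fall back to iemAImpl_alt_mem_fence on x86 hosts
 * without SSE2.  A portable sketch of such a full-barrier fallback using
 * C11 atomics (illustration only, not the actual helper):
 *
 *     #include <stdatomic.h>
 *
 *     static void AltMemFence(void)
 *     {
 *         atomic_thread_fence(memory_order_seq_cst);
 *     }
 */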
9919
9920/** Opcode 0xf3 0x0f 0xae 11b/0. */
9921FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9922{
9923 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9924 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9925 {
9926 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9928 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9929 IEM_MC_LOCAL(uint64_t, u64Dst);
9930 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9931 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9932 IEM_MC_ADVANCE_RIP_AND_FINISH();
9933 IEM_MC_END();
9934 }
9935 else
9936 {
9937 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9939 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9940 IEM_MC_LOCAL(uint32_t, u32Dst);
9941 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9942 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9943 IEM_MC_ADVANCE_RIP_AND_FINISH();
9944 IEM_MC_END();
9945 }
9946}
9947
9948
9949/** Opcode 0xf3 0x0f 0xae 11b/1. */
9950FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9951{
9952 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9953 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9954 {
9955 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9957 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9958 IEM_MC_LOCAL(uint64_t, u64Dst);
9959 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9960 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9961 IEM_MC_ADVANCE_RIP_AND_FINISH();
9962 IEM_MC_END();
9963 }
9964 else
9965 {
9966 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9968 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9969 IEM_MC_LOCAL(uint32_t, u32Dst);
9970 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9971 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9972 IEM_MC_ADVANCE_RIP_AND_FINISH();
9973 IEM_MC_END();
9974 }
9975}
9976
9977
9978/** Opcode 0xf3 0x0f 0xae 11b/2. */
9979FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9980{
9981 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9982 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9983 {
9984 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9986 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9987 IEM_MC_LOCAL(uint64_t, u64Dst);
9988 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9989 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9990 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9991 IEM_MC_ADVANCE_RIP_AND_FINISH();
9992 IEM_MC_END();
9993 }
9994 else
9995 {
9996 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9998 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9999 IEM_MC_LOCAL(uint32_t, u32Dst);
10000 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10001 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10002 IEM_MC_ADVANCE_RIP_AND_FINISH();
10003 IEM_MC_END();
10004 }
10005}
10006
10007
10008/** Opcode 0xf3 0x0f 0xae 11b/3. */
10009FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10010{
10011 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10013 {
10014 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10016 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10017 IEM_MC_LOCAL(uint64_t, u64Dst);
10018 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10019 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10020 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10021 IEM_MC_ADVANCE_RIP_AND_FINISH();
10022 IEM_MC_END();
10023 }
10024 else
10025 {
10026 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10028 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10029 IEM_MC_LOCAL(uint32_t, u32Dst);
10030 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10031 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10032 IEM_MC_ADVANCE_RIP_AND_FINISH();
10033 IEM_MC_END();
10034 }
10035}
10036
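/*
 * Illustration only: IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 in the 64-bit
 * WRFSBASE/WRGSBASE paths above rejects non-canonical addresses, i.e. ones
 * where bits 63..48 are not a sign extension of bit 47.  A standalone
 * sketch of that test:
 *
 *     #include <stdint.h>
 *     #include <stdbool.h>
 *
 *     static bool IsCanonical(uint64_t uAddr)
 *     {
 *         return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
 *     }
 */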
10037
10038/**
10039 * Group 15 jump table for register variant.
10040 */
10041IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10042{ /* pfx: none, 066h, 0f3h, 0f2h */
10043 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10044 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10045 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10046 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10047 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10048 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10049 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10050 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10051};
10052AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10053
10054
10055/**
10056 * Group 15 jump table for memory variant.
10057 */
10058IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10059{ /* pfx: none, 066h, 0f3h, 0f2h */
10060 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10061 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10062 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10063 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10064 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10065 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10066 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10067 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10068};
10069AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10070
10071
10072/** Opcode 0x0f 0xae. */
10073FNIEMOP_DEF(iemOp_Grp15)
10074{
10075 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
10076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10077 if (IEM_IS_MODRM_REG_MODE(bRm))
10078 /* register, register */
10079 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10080 + pVCpu->iem.s.idxPrefix], bRm);
10081 /* memory, register */
10082 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10083 + pVCpu->iem.s.idxPrefix], bRm);
10084}
10085
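/*
 * Illustration only: both group 15 tables are indexed as /reg * 4 + prefix,
 * where idxPrefix follows the column order above (0 = none, 1 = 066h,
 * 2 = 0f3h, 3 = 0f2h).  So e.g. F3 0F AE /0 in register mode selects
 * g_apfnGroup15RegReg[0 * 4 + 2], i.e. iemOp_Grp15_rdfsbase.
 */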
10086
10087/**
10088 * @opcode 0xaf
10089 * @opflclass multiply
10090 */
10091FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10092{
10093 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10094 IEMOP_HLP_MIN_386();
10095 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10096 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10098 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10099}
10100
10101
10102/**
10103 * @opcode 0xb0
10104 * @opflclass arithmetic
10105 */
10106FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10107{
10108 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10109 IEMOP_HLP_MIN_486();
10110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10111
10112 if (IEM_IS_MODRM_REG_MODE(bRm))
10113 {
10114 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10116 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10117 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10118 IEM_MC_ARG(uint8_t, u8Src, 2);
10119 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10120
10121 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10122 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10123 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10124 IEM_MC_REF_EFLAGS(pEFlags);
10125 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10126
10127 IEM_MC_ADVANCE_RIP_AND_FINISH();
10128 IEM_MC_END();
10129 }
10130 else
10131 {
10132#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10133 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10136 IEMOP_HLP_DONE_DECODING(); \
10137 \
10138 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10139 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10140 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10141 \
10142 IEM_MC_ARG(uint8_t, u8Src, 2); \
10143 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10144 \
10145 IEM_MC_LOCAL(uint8_t, u8Al); \
10146 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10147 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10148 \
10149 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10150 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10151 \
10152 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10153 IEM_MC_COMMIT_EFLAGS(EFlags); \
10154 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10155 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10156 IEM_MC_END()
10157
10158 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10159 {
10160 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10161 }
10162 else
10163 {
10164 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10165 }
10166 }
10167}
10168
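/*
 * Illustration only: CMPXCHG compares the accumulator with the destination;
 * on a match it sets ZF and stores the source into the destination,
 * otherwise it clears ZF and loads the destination value into the
 * accumulator.  A behavioural sketch of the byte variant (the arithmetic
 * flags from the implicit CMP are omitted):
 *
 *     #include <stdint.h>
 *
 *     static void CmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
 *     {
 *         if (*puDst == *puAl)
 *         {
 *             *pfEFlags |= X86_EFL_ZF;    // 1 << 6
 *             *puDst     = uSrc;
 *         }
 *         else
 *         {
 *             *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
 *             *puAl      = *puDst;
 *         }
 *     }
 */
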
10169/**
10170 * @opcode 0xb1
10171 * @opflclass arithmetic
10172 */
10173FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10174{
10175 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10176 IEMOP_HLP_MIN_486();
10177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10178
10179 if (IEM_IS_MODRM_REG_MODE(bRm))
10180 {
10181 switch (pVCpu->iem.s.enmEffOpSize)
10182 {
10183 case IEMMODE_16BIT:
10184 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10186 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10187 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10188 IEM_MC_ARG(uint16_t, u16Src, 2);
10189 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10190
10191 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10192 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10193 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10194 IEM_MC_REF_EFLAGS(pEFlags);
10195 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10196
10197 IEM_MC_ADVANCE_RIP_AND_FINISH();
10198 IEM_MC_END();
10199 break;
10200
10201 case IEMMODE_32BIT:
10202 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10205 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10206 IEM_MC_ARG(uint32_t, u32Src, 2);
10207 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10208
10209 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10210 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10211 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10212 IEM_MC_REF_EFLAGS(pEFlags);
10213 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10214
10215 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10216 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10217 } IEM_MC_ELSE() {
10218 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10219 } IEM_MC_ENDIF();
10220
10221 IEM_MC_ADVANCE_RIP_AND_FINISH();
10222 IEM_MC_END();
10223 break;
10224
10225 case IEMMODE_64BIT:
10226 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10229 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10230 IEM_MC_ARG(uint64_t, u64Src, 2);
10231 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10232
10233 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10234 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10235 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10236 IEM_MC_REF_EFLAGS(pEFlags);
10237 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10238
10239 IEM_MC_ADVANCE_RIP_AND_FINISH();
10240 IEM_MC_END();
10241 break;
10242
10243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10244 }
10245 }
10246 else
10247 {
10248#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10249 do { \
10250 switch (pVCpu->iem.s.enmEffOpSize) \
10251 { \
10252 case IEMMODE_16BIT: \
10253 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10254 \
10255 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10258 IEMOP_HLP_DONE_DECODING(); \
10259 \
10260 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10261 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10262 \
10263 IEM_MC_ARG(uint16_t, u16Src, 2); \
10264 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10265 \
10266 IEM_MC_LOCAL(uint16_t, u16Ax); \
10267 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10268 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10269 \
10270 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10271 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10272 \
10273 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10274 IEM_MC_COMMIT_EFLAGS(EFlags); \
10275 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10277 IEM_MC_END(); \
10278 break; \
10279 \
10280 case IEMMODE_32BIT: \
10281 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10284 IEMOP_HLP_DONE_DECODING(); \
10285 \
10286 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10287 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10288 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10289 \
10290 IEM_MC_ARG(uint32_t, u32Src, 2); \
10291 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10292 \
10293 IEM_MC_LOCAL(uint32_t, u32Eax); \
10294 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10295 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10296 \
10297 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10298 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10299 \
10300 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10301 IEM_MC_COMMIT_EFLAGS(EFlags); \
10302 \
10303 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10304 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10305 } IEM_MC_ENDIF(); \
10306 \
10307 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10308 IEM_MC_END(); \
10309 break; \
10310 \
10311 case IEMMODE_64BIT: \
10312 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10315 IEMOP_HLP_DONE_DECODING(); \
10316 \
10317 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10318 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10319 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10320 \
10321 IEM_MC_ARG(uint64_t, u64Src, 2); \
10322 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10323 \
10324 IEM_MC_LOCAL(uint64_t, u64Rax); \
10325 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10326 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10327 \
10328 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10329 \
10330 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10331 \
10332 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10333 IEM_MC_COMMIT_EFLAGS(EFlags); \
10334 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10335 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10336 IEM_MC_END(); \
10337 break; \
10338 \
10339 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10340 } \
10341 } while (0)
10342
10343 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10344 {
10345 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10346 }
10347 else
10348 {
10349 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10350 }
10351 }
10352}
10353
10354
10355/** Opcode 0x0f 0xb2. */
10356FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10357{
10358 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10359 IEMOP_HLP_MIN_386();
10360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10361 if (IEM_IS_MODRM_REG_MODE(bRm))
10362 IEMOP_RAISE_INVALID_OPCODE_RET();
10363 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10364}
10365
10366
10367/**
10368 * @opcode 0xb3
10369 * @oppfx n/a
10370 * @opflclass bitmap
10371 */
10372FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10373{
10374 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10375 IEMOP_HLP_MIN_386();
10376 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10377 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10378}
10379
10380
10381/** Opcode 0x0f 0xb4. */
10382FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10383{
10384 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10385 IEMOP_HLP_MIN_386();
10386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10387 if (IEM_IS_MODRM_REG_MODE(bRm))
10388 IEMOP_RAISE_INVALID_OPCODE_RET();
10389 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10390}
10391
10392
10393/** Opcode 0x0f 0xb5. */
10394FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10395{
10396 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10397 IEMOP_HLP_MIN_386();
10398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10399 if (IEM_IS_MODRM_REG_MODE(bRm))
10400 IEMOP_RAISE_INVALID_OPCODE_RET();
10401 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10402}
10403
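/*
 * lss, lfs and lgs (like lds/les in the one byte map) all go through
 * iemOpCommonLoadSRegAndGreg.  Informal sketch, using made-up names
 * (GCPtrMp = the Mp memory operand, cbOp = the effective operand size):
 *
 *      greg = read(GCPtrMp, cbOp);         // 16/32/64-bit offset part first,
 *      Sreg = read_u16(GCPtrMp + cbOp);    // then the 16-bit selector.
 *
 * The register form has no meaning, hence the #UD raised above.
 */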
10404
10405/** Opcode 0x0f 0xb6. */
10406FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10407{
10408 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10409 IEMOP_HLP_MIN_386();
10410
10411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10412
10413 /*
10414 * If rm is denoting a register, no more instruction bytes.
10415 */
10416 if (IEM_IS_MODRM_REG_MODE(bRm))
10417 {
10418 switch (pVCpu->iem.s.enmEffOpSize)
10419 {
10420 case IEMMODE_16BIT:
10421 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10423 IEM_MC_LOCAL(uint16_t, u16Value);
10424 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10425 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10426 IEM_MC_ADVANCE_RIP_AND_FINISH();
10427 IEM_MC_END();
10428 break;
10429
10430 case IEMMODE_32BIT:
10431 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10433 IEM_MC_LOCAL(uint32_t, u32Value);
10434 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10435 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10436 IEM_MC_ADVANCE_RIP_AND_FINISH();
10437 IEM_MC_END();
10438 break;
10439
10440 case IEMMODE_64BIT:
10441 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10443 IEM_MC_LOCAL(uint64_t, u64Value);
10444 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10445 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10446 IEM_MC_ADVANCE_RIP_AND_FINISH();
10447 IEM_MC_END();
10448 break;
10449
10450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10451 }
10452 }
10453 else
10454 {
10455 /*
10456 * We're loading a register from memory.
10457 */
10458 switch (pVCpu->iem.s.enmEffOpSize)
10459 {
10460 case IEMMODE_16BIT:
10461 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10462 IEM_MC_LOCAL(uint16_t, u16Value);
10463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10466 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10467 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10468 IEM_MC_ADVANCE_RIP_AND_FINISH();
10469 IEM_MC_END();
10470 break;
10471
10472 case IEMMODE_32BIT:
10473 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10474 IEM_MC_LOCAL(uint32_t, u32Value);
10475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10478 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10479 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10480 IEM_MC_ADVANCE_RIP_AND_FINISH();
10481 IEM_MC_END();
10482 break;
10483
10484 case IEMMODE_64BIT:
10485 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10486 IEM_MC_LOCAL(uint64_t, u64Value);
10487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10491 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10492 IEM_MC_ADVANCE_RIP_AND_FINISH();
10493 IEM_MC_END();
10494 break;
10495
10496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10497 }
10498 }
10499}
10500
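/*
 * All the movzx Gv,Eb cases above reduce to a plain zero extension, e.g.
 * for the 32-bit destination (informal sketch):
 *
 *      uint32_t const uDst = (uint32_t)u8Src;     // bits 31:8 become zero
 *
 * and in 64-bit mode the standard 32-bit write rule clears bits 63:32 too.
 */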
10501
10502/** Opcode 0x0f 0xb7. */
10503FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10504{
10505 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10506 IEMOP_HLP_MIN_386();
10507
10508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10509
10510 /** @todo Not entirely sure how the operand size prefix is handled here,
10511 * assuming that it will be ignored. Would be nice to have a few
10512 * tests for this. */
10513
10514 /** @todo There should be no difference in the behaviour whether REX.W is
10515 * present or not... */
10516
10517 /*
10518 * If rm is denoting a register, no more instruction bytes.
10519 */
10520 if (IEM_IS_MODRM_REG_MODE(bRm))
10521 {
10522 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10523 {
10524 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10526 IEM_MC_LOCAL(uint32_t, u32Value);
10527 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10528 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530 IEM_MC_END();
10531 }
10532 else
10533 {
10534 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10536 IEM_MC_LOCAL(uint64_t, u64Value);
10537 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10538 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10539 IEM_MC_ADVANCE_RIP_AND_FINISH();
10540 IEM_MC_END();
10541 }
10542 }
10543 else
10544 {
10545 /*
10546 * We're loading a register from memory.
10547 */
10548 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10549 {
10550 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10551 IEM_MC_LOCAL(uint32_t, u32Value);
10552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10555 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10556 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10557 IEM_MC_ADVANCE_RIP_AND_FINISH();
10558 IEM_MC_END();
10559 }
10560 else
10561 {
10562 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10563 IEM_MC_LOCAL(uint64_t, u64Value);
10564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10567 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10568 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10569 IEM_MC_ADVANCE_RIP_AND_FINISH();
10570 IEM_MC_END();
10571 }
10572 }
10573}
10574
10575
10576/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10577FNIEMOP_UD_STUB(iemOp_jmpe);
10578
10579
10580/**
10581 * @opcode 0xb8
10582 * @oppfx 0xf3
10583 * @opflmodify cf,pf,af,zf,sf,of
10584 * @opflclear cf,pf,af,sf,of
10585 */
10586FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10587{
10588 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10589 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10590 return iemOp_InvalidNeedRM(pVCpu);
10591#ifndef TST_IEM_CHECK_MC
10592# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10593 static const IEMOPBINSIZES s_Native =
10594 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10595# endif
10596 static const IEMOPBINSIZES s_Fallback =
10597 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10598#endif
10599 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10601 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10602}
10603
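/*
 * Summary of the popcnt workers selected above, restating the @opflmodify
 * and @opflclear notes (informal, not a spec):
 *
 *      uDst = count_of_set_bits(uSrc);            // e.g. popcnt(0xf0) = 4
 *      CF = PF = AF = SF = OF = 0;
 *      ZF = (uSrc == 0);
 */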
10604
10605/**
10606 * @opcode 0xb9
10607 * @opinvalid intel-modrm
10608 * @optest ->
10609 */
10610FNIEMOP_DEF(iemOp_Grp10)
10611{
10612 /*
10613 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also decodes
10614 * the modr/m byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10615 */
10616 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10617 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10618 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10619}
10620
10621
10622/**
10623 * Body for the group 8 bit instructions (bt/bts/btr/btc Ev,Ib).
10624 */
10625#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10626 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10627 \
10628 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10629 { \
10630 /* register destination. */ \
10631 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10632 \
10633 switch (pVCpu->iem.s.enmEffOpSize) \
10634 { \
10635 case IEMMODE_16BIT: \
10636 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10638 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10639 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10640 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10641 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10642 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10643 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10644 \
10645 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10646 IEM_MC_END(); \
10647 break; \
10648 \
10649 case IEMMODE_32BIT: \
10650 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10652 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10653 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10654 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10655 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10656 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10657 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10658 \
10659 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10661 IEM_MC_END(); \
10662 break; \
10663 \
10664 case IEMMODE_64BIT: \
10665 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10667 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10668 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10669 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10670 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10671 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10672 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10673 \
10674 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10675 IEM_MC_END(); \
10676 break; \
10677 \
10678 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10679 } \
10680 } \
10681 else \
10682 { \
10683 /* memory destination. */ \
10684 /** @todo test negative bit offsets! */ \
10685 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10686 { \
10687 switch (pVCpu->iem.s.enmEffOpSize) \
10688 { \
10689 case IEMMODE_16BIT: \
10690 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10693 \
10694 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10695 IEMOP_HLP_DONE_DECODING(); \
10696 \
10697 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10698 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10699 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10700 \
10701 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10702 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10703 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10704 \
10705 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10706 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10708 IEM_MC_END(); \
10709 break; \
10710 \
10711 case IEMMODE_32BIT: \
10712 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10715 \
10716 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10717 IEMOP_HLP_DONE_DECODING(); \
10718 \
10719 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10720 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10721 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10722 \
10723 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10724 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10725 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10726 \
10727 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10728 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10729 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10730 IEM_MC_END(); \
10731 break; \
10732 \
10733 case IEMMODE_64BIT: \
10734 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10737 \
10738 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10739 IEMOP_HLP_DONE_DECODING(); \
10740 \
10741 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10742 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10743 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10744 \
10745 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10746 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10747 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10748 \
10749 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10750 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10751 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10752 IEM_MC_END(); \
10753 break; \
10754 \
10755 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10756 } \
10757 } \
10758 else \
10759 { \
10760 (void)0
10761/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10762#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10763 switch (pVCpu->iem.s.enmEffOpSize) \
10764 { \
10765 case IEMMODE_16BIT: \
10766 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10769 \
10770 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10771 IEMOP_HLP_DONE_DECODING(); \
10772 \
10773 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10774 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10775 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10776 \
10777 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10778 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10779 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10780 \
10781 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10782 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10783 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10784 IEM_MC_END(); \
10785 break; \
10786 \
10787 case IEMMODE_32BIT: \
10788 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10791 \
10792 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10793 IEMOP_HLP_DONE_DECODING(); \
10794 \
10795 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10796 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10797 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10798 \
10799 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10800 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10801 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10802 \
10803 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10804 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10805 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10806 IEM_MC_END(); \
10807 break; \
10808 \
10809 case IEMMODE_64BIT: \
10810 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10813 \
10814 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10815 IEMOP_HLP_DONE_DECODING(); \
10816 \
10817 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10818 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10819 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10820 \
10821 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10822 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10823 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10824 \
10825 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10826 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10827 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10828 IEM_MC_END(); \
10829 break; \
10830 \
10831 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10832 } \
10833 } \
10834 } \
10835 (void)0
10836
10837/* Read-only version (bt) */
10838#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10839 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10840 \
10841 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10842 { \
10843 /* register destination. */ \
10844 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10845 \
10846 switch (pVCpu->iem.s.enmEffOpSize) \
10847 { \
10848 case IEMMODE_16BIT: \
10849 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10851 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10852 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10853 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10854 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10855 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10856 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10857 \
10858 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10859 IEM_MC_END(); \
10860 break; \
10861 \
10862 case IEMMODE_32BIT: \
10863 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10865 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10866 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10867 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10868 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10869 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10870 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10871 \
10872 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10873 IEM_MC_END(); \
10874 break; \
10875 \
10876 case IEMMODE_64BIT: \
10877 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10879 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10880 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10881 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10882 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10883 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10884 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10885 \
10886 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10887 IEM_MC_END(); \
10888 break; \
10889 \
10890 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10891 } \
10892 } \
10893 else \
10894 { \
10895 /* memory destination. */ \
10896 /** @todo test negative bit offsets! */ \
10897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10898 { \
10899 switch (pVCpu->iem.s.enmEffOpSize) \
10900 { \
10901 case IEMMODE_16BIT: \
10902 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10905 \
10906 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10907 IEMOP_HLP_DONE_DECODING(); \
10908 \
10909 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10910 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10911 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10912 \
10913 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10914 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10915 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10916 \
10917 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10918 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10919 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10920 IEM_MC_END(); \
10921 break; \
10922 \
10923 case IEMMODE_32BIT: \
10924 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10927 \
10928 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10929 IEMOP_HLP_DONE_DECODING(); \
10930 \
10931 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10932 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10933 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10934 \
10935 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10936 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10937 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10938 \
10939 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10940 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10941 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10942 IEM_MC_END(); \
10943 break; \
10944 \
10945 case IEMMODE_64BIT: \
10946 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10949 \
10950 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10951 IEMOP_HLP_DONE_DECODING(); \
10952 \
10953 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10954 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10955 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10956 \
10957 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10958 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10959 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10960 \
10961 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10962 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10963 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10964 IEM_MC_END(); \
10965 break; \
10966 \
10967 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10968 } \
10969 } \
10970 else \
10971 { \
10972 IEMOP_HLP_DONE_DECODING(); \
10973 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
10974 } \
10975 } \
10976 (void)0
10977
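/*
 * As the bImm masking above shows, the immediate bit offset wraps within
 * the operand; informally for bt, with cBits standing in for the 16/32/64
 * bit operand width (not a real variable):
 *
 *      CF = (uDst >> (bImm & (cBits - 1))) & 1;
 *
 * bts, btr and btc additionally set, clear or toggle that same bit.
 */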
10978
10979/**
10980 * @opmaps grp8
10981 * @opcode /4
10982 * @oppfx n/a
10983 * @opflclass bitmap
10984 */
10985FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10986{
10987 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10988 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
10989}
10990
10991
10992/**
10993 * @opmaps grp8
10994 * @opcode /5
10995 * @oppfx n/a
10996 * @opflclass bitmap
10997 */
10998FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
10999{
11000 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11001 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11002 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11003}
11004
11005
11006/**
11007 * @opmaps grp8
11008 * @opcode /6
11009 * @oppfx n/a
11010 * @opflclass bitmap
11011 */
11012FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11013{
11014 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11015 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11016 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11017}
11018
11019
11020/**
11021 * @opmaps grp8
11022 * @opcode /7
11023 * @oppfx n/a
11024 * @opflclass bitmap
11025 */
11026FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11027{
11028 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11029 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11030 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11031}
11032
11033
11034/** Opcode 0x0f 0xba. */
11035FNIEMOP_DEF(iemOp_Grp8)
11036{
11037 IEMOP_HLP_MIN_386();
11038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11039 switch (IEM_GET_MODRM_REG_8(bRm))
11040 {
11041 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11042 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11043 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11044 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11045
11046 case 0: case 1: case 2: case 3:
11047 /* Both AMD and Intel want full modr/m decoding and imm8. */
11048 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11049
11050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11051 }
11052}
11053
11054
11055/**
11056 * @opcode 0xbb
11057 * @oppfx n/a
11058 * @opflclass bitmap
11059 */
11060FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11061{
11062 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11063 IEMOP_HLP_MIN_386();
11064 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11065 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11066}
11067
11068
11069/**
11070 * Body for BSF and BSR instructions.
11071 *
11072 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11073 * the destination register, which means that for 32-bit operations the high
11074 * bits must be left alone.
11075 *
11076 * @param pImpl Pointer to the instruction implementation (assembly).
11077 */
11078#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11080 \
11081 /* \
11082 * If rm is denoting a register, no more instruction bytes. \
11083 */ \
11084 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11085 { \
11086 switch (pVCpu->iem.s.enmEffOpSize) \
11087 { \
11088 case IEMMODE_16BIT: \
11089 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11091 \
11092 IEM_MC_ARG(uint16_t, u16Src, 2); \
11093 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11094 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11095 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11096 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11097 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11098 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11099 \
11100 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11101 IEM_MC_END(); \
11102 break; \
11103 \
11104 case IEMMODE_32BIT: \
11105 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11107 \
11108 IEM_MC_ARG(uint32_t, u32Src, 2); \
11109 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11110 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11111 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11112 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11113 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11114 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11115 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11116 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11117 } IEM_MC_ENDIF(); \
11118 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11119 IEM_MC_END(); \
11120 break; \
11121 \
11122 case IEMMODE_64BIT: \
11123 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11125 \
11126 IEM_MC_ARG(uint64_t, u64Src, 2); \
11127 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11128 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11129 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11130 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11131 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11132 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11133 \
11134 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11135 IEM_MC_END(); \
11136 break; \
11137 \
11138 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11139 } \
11140 } \
11141 else \
11142 { \
11143 /* \
11144 * We're accessing memory. \
11145 */ \
11146 switch (pVCpu->iem.s.enmEffOpSize) \
11147 { \
11148 case IEMMODE_16BIT: \
11149 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11153 \
11154 IEM_MC_ARG(uint16_t, u16Src, 2); \
11155 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11156 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11157 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11158 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11159 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11160 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11161 \
11162 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11163 IEM_MC_END(); \
11164 break; \
11165 \
11166 case IEMMODE_32BIT: \
11167 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11171 \
11172 IEM_MC_ARG(uint32_t, u32Src, 2); \
11173 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11174 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11175 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11176 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11177 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11178 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11179 \
11180 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11181 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11182 } IEM_MC_ENDIF(); \
11183 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11184 IEM_MC_END(); \
11185 break; \
11186 \
11187 case IEMMODE_64BIT: \
11188 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11192 \
11193 IEM_MC_ARG(uint64_t, u64Src, 2); \
11194 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11195 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11196 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11197 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11198 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11199 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11200 \
11201 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11202 IEM_MC_END(); \
11203 break; \
11204 \
11205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11206 } \
11207 } (void)0
11208
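/*
 * The essential property the macro above preserves (see the doc comment):
 * the destination is only written for a non-zero source, and only then does
 * the 32-bit form clear the upper half of the destination register:
 *
 *      if (uSrc) { ZF = 0; uDst = scan(uSrc); }   // bsf: lowest set bit,
 *      else      { ZF = 1; }                      // bsr: highest set bit;
 *                                                 // uDst is left as it was.
 *
 * (Informal sketch; scan() is a stand-in, not a real helper.)
 */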
11209
11210/**
11211 * @opcode 0xbc
11212 * @oppfx !0xf3
11213 * @opfltest cf,pf,af,sf,of
11214 * @opflmodify cf,pf,af,zf,sf,of
11215 * @opflundef cf,pf,af,sf,of
11216 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11217 * document them as inputs. Sigh.
11218 */
11219FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11220{
11221 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11222 IEMOP_HLP_MIN_386();
11223 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11224 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11225 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11226}
11227
11228
11229/**
11230 * @opcode 0xbc
11231 * @oppfx 0xf3
11232 * @opfltest pf,af,sf,of
11233 * @opflmodify cf,pf,af,zf,sf,of
11234 * @opflundef pf,af,sf,of
11235 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11236 * document them as inputs. Sigh.
11237 */
11238FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11239{
11240 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11241 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11242 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11243
11244#ifndef TST_IEM_CHECK_MC
11245 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11246 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11247 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11248 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11249 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11250 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11251 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11252 {
11253 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11254 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11255 };
11256#endif
11257 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11258 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11259 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11261 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11262}
11263
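/*
 * Unlike bsf, tzcnt is fully defined for a zero source.  Informal summary,
 * with cBits again standing in for the operand width:
 *
 *      uDst = uSrc ? index_of_lowest_set_bit(uSrc) : cBits;
 *      CF   = (uSrc == 0);
 *      ZF   = (uDst == 0);
 *
 * That difference is why the decoder above falls back to iemOp_bsf_Gv_Ev
 * when the guest doesn't have BMI1.
 */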
11264
11265/**
11266 * @opcode 0xbd
11267 * @oppfx !0xf3
11268 * @opfltest cf,pf,af,sf,of
11269 * @opflmodify cf,pf,af,zf,sf,of
11270 * @opflundef cf,pf,af,sf,of
11271 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11272 * document them as inputs. Sigh.
11273 */
11274FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11275{
11276 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11277 IEMOP_HLP_MIN_386();
11278 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11279 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11280 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11281}
11282
11283
11284/**
11285 * @opcode 0xbd
11286 * @oppfx 0xf3
11287 * @opfltest pf,af,sf,of
11288 * @opflmodify cf,pf,af,zf,sf,of
11289 * @opflundef pf,af,sf,of
11290 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11291 * document them as inputs. Sigh.
11292 */
11293FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11294{
11295 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11296 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11297 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11298
11299#ifndef TST_IEM_CHECK_MC
11300 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11301 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11302 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11303 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11304 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11305 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11306 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11307 {
11308 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11309 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11310 };
11311#endif
11312 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11313 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11314 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11316 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11317}
11318
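/*
 * lzcnt mirrors tzcnt at the other end of the operand (informal, cBits as
 * above), falling back to iemOp_bsr_Gv_Ev when the guest lacks ABM/LZCNT:
 *
 *      uDst = uSrc ? cBits - 1 - index_of_highest_set_bit(uSrc) : cBits;
 *      CF   = (uSrc == 0);
 *      ZF   = (uDst == 0);
 */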
11319
11320
11321/** Opcode 0x0f 0xbe. */
11322FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11323{
11324 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11325 IEMOP_HLP_MIN_386();
11326
11327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11328
11329 /*
11330 * If rm is denoting a register, no more instruction bytes.
11331 */
11332 if (IEM_IS_MODRM_REG_MODE(bRm))
11333 {
11334 switch (pVCpu->iem.s.enmEffOpSize)
11335 {
11336 case IEMMODE_16BIT:
11337 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11339 IEM_MC_LOCAL(uint16_t, u16Value);
11340 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11341 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11342 IEM_MC_ADVANCE_RIP_AND_FINISH();
11343 IEM_MC_END();
11344 break;
11345
11346 case IEMMODE_32BIT:
11347 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11349 IEM_MC_LOCAL(uint32_t, u32Value);
11350 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11351 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11352 IEM_MC_ADVANCE_RIP_AND_FINISH();
11353 IEM_MC_END();
11354 break;
11355
11356 case IEMMODE_64BIT:
11357 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11359 IEM_MC_LOCAL(uint64_t, u64Value);
11360 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11361 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11362 IEM_MC_ADVANCE_RIP_AND_FINISH();
11363 IEM_MC_END();
11364 break;
11365
11366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11367 }
11368 }
11369 else
11370 {
11371 /*
11372 * We're loading a register from memory.
11373 */
11374 switch (pVCpu->iem.s.enmEffOpSize)
11375 {
11376 case IEMMODE_16BIT:
11377 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11378 IEM_MC_LOCAL(uint16_t, u16Value);
11379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11382 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11383 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11384 IEM_MC_ADVANCE_RIP_AND_FINISH();
11385 IEM_MC_END();
11386 break;
11387
11388 case IEMMODE_32BIT:
11389 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11390 IEM_MC_LOCAL(uint32_t, u32Value);
11391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11394 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11395 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11396 IEM_MC_ADVANCE_RIP_AND_FINISH();
11397 IEM_MC_END();
11398 break;
11399
11400 case IEMMODE_64BIT:
11401 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11402 IEM_MC_LOCAL(uint64_t, u64Value);
11403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11406 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11407 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11408 IEM_MC_ADVANCE_RIP_AND_FINISH();
11409 IEM_MC_END();
11410 break;
11411
11412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11413 }
11414 }
11415}
11416
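/*
 * movsx is the sign extending twin of movzx above, e.g. for the 32-bit
 * destination (informal sketch):
 *
 *      uint32_t const uDst = (uint32_t)(int32_t)(int8_t)u8Src;
 *
 * so source values 0x80..0xff fan out to 0xffffff80..0xffffffff.
 */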
11417
11418/** Opcode 0x0f 0xbf. */
11419FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11420{
11421 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11422 IEMOP_HLP_MIN_386();
11423
11424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11425
11426 /** @todo Not entirely sure how the operand size prefix is handled here,
11427 * assuming that it will be ignored. Would be nice to have a few
11428 * tests for this. */
11429 /*
11430 * If rm is denoting a register, no more instruction bytes.
11431 */
11432 if (IEM_IS_MODRM_REG_MODE(bRm))
11433 {
11434 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11435 {
11436 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11438 IEM_MC_LOCAL(uint32_t, u32Value);
11439 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11440 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11441 IEM_MC_ADVANCE_RIP_AND_FINISH();
11442 IEM_MC_END();
11443 }
11444 else
11445 {
11446 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11448 IEM_MC_LOCAL(uint64_t, u64Value);
11449 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11450 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11451 IEM_MC_ADVANCE_RIP_AND_FINISH();
11452 IEM_MC_END();
11453 }
11454 }
11455 else
11456 {
11457 /*
11458 * We're loading a register from memory.
11459 */
11460 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11461 {
11462 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11463 IEM_MC_LOCAL(uint32_t, u32Value);
11464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11467 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11469 IEM_MC_ADVANCE_RIP_AND_FINISH();
11470 IEM_MC_END();
11471 }
11472 else
11473 {
11474 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11475 IEM_MC_LOCAL(uint64_t, u64Value);
11476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11479 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11480 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11481 IEM_MC_ADVANCE_RIP_AND_FINISH();
11482 IEM_MC_END();
11483 }
11484 }
11485}
11486
11487
11488/**
11489 * @opcode 0xc0
11490 * @opflclass arithmetic
11491 */
11492FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11493{
11494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11495 IEMOP_HLP_MIN_486();
11496 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11497
11498 /*
11499 * If rm is denoting a register, no more instruction bytes.
11500 */
11501 if (IEM_IS_MODRM_REG_MODE(bRm))
11502 {
11503 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11505 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11506 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11507 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11508
11509 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11510 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11511 IEM_MC_REF_EFLAGS(pEFlags);
11512 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11513
11514 IEM_MC_ADVANCE_RIP_AND_FINISH();
11515 IEM_MC_END();
11516 }
11517 else
11518 {
11519 /*
11520 * We're accessing memory.
11521 */
11522#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11523 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11526 IEMOP_HLP_DONE_DECODING(); \
11527 \
11528 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11529 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11530 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11531 \
11532 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11533 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11534 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11535 \
11536 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11537 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11538 \
11539 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11540 IEM_MC_COMMIT_EFLAGS(EFlags); \
11541 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11542 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11543 IEM_MC_END()
11544 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11545 {
11546 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11547 }
11548 else
11549 {
11550 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11551 }
11552 }
11553}
11554
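/*
 * xadd exchanges and adds in one operation; informally:
 *
 *      uTmp  = uDst;
 *      uDst += uReg;                      // flags are set as for add
 *      uReg  = uTmp;
 *
 * which is why the memory form above snapshots the register operand into
 * u8RegCopy before calling the worker and stores it back afterwards.
 */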
11555
11556/**
11557 * @opcode 0xc1
11558 * @opflclass arithmetic
11559 */
11560FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11561{
11562 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11563 IEMOP_HLP_MIN_486();
11564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11565
11566 /*
11567 * If rm is denoting a register, no more instruction bytes.
11568 */
11569 if (IEM_IS_MODRM_REG_MODE(bRm))
11570 {
11571 switch (pVCpu->iem.s.enmEffOpSize)
11572 {
11573 case IEMMODE_16BIT:
11574 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11576 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11577 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11578 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11579
11580 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11581 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11582 IEM_MC_REF_EFLAGS(pEFlags);
11583 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11584
11585 IEM_MC_ADVANCE_RIP_AND_FINISH();
11586 IEM_MC_END();
11587 break;
11588
11589 case IEMMODE_32BIT:
11590 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11592 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11593 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11594 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11595
11596 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11597 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11598 IEM_MC_REF_EFLAGS(pEFlags);
11599 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11600
11601 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11602 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11603 IEM_MC_ADVANCE_RIP_AND_FINISH();
11604 IEM_MC_END();
11605 break;
11606
11607 case IEMMODE_64BIT:
11608 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11610 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11611 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11613
11614 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11615 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11616 IEM_MC_REF_EFLAGS(pEFlags);
11617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11618
11619 IEM_MC_ADVANCE_RIP_AND_FINISH();
11620 IEM_MC_END();
11621 break;
11622
11623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11624 }
11625 }
11626 else
11627 {
11628 /*
11629 * We're accessing memory.
11630 */
11631#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11632 do { \
11633 switch (pVCpu->iem.s.enmEffOpSize) \
11634 { \
11635 case IEMMODE_16BIT: \
11636 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11639 IEMOP_HLP_DONE_DECODING(); \
11640 \
11641 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11642 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11643 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11644 \
11645 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11646 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11647 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11648 \
11649 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11650 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11651 \
11652 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11653 IEM_MC_COMMIT_EFLAGS(EFlags); \
11654 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11655 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11656 IEM_MC_END(); \
11657 break; \
11658 \
11659 case IEMMODE_32BIT: \
11660 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11663 IEMOP_HLP_DONE_DECODING(); \
11664 \
11665 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11666 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11667 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11668 \
11669 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11670 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11671 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11672 \
11673 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11674 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11675 \
11676 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11677 IEM_MC_COMMIT_EFLAGS(EFlags); \
11678 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11679 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11680 IEM_MC_END(); \
11681 break; \
11682 \
11683 case IEMMODE_64BIT: \
11684 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11687 IEMOP_HLP_DONE_DECODING(); \
11688 \
11689 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11690 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11691 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11692 \
11693 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11694 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11695 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11696 \
11697 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11698 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11699 \
11700 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11701 IEM_MC_COMMIT_EFLAGS(EFlags); \
11702 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11703 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11704 IEM_MC_END(); \
11705 break; \
11706 \
11707 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11708 } \
11709 } while (0)
11710
11711 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11712 {
11713 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11714 }
11715 else
11716 {
11717 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11718 }
11719 }
11720}
11721
11722
11723/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11724FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11725{
11726 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11727
11728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11729 if (IEM_IS_MODRM_REG_MODE(bRm))
11730 {
11731 /*
11732 * XMM, XMM.
11733 */
11734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11735 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11737 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11738 IEM_MC_LOCAL(X86XMMREG, Dst);
11739 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11740 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11741 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11742 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11743 IEM_MC_PREPARE_SSE_USAGE();
11744 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11745 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11746 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11747 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11748
11749 IEM_MC_ADVANCE_RIP_AND_FINISH();
11750 IEM_MC_END();
11751 }
11752 else
11753 {
11754 /*
11755 * XMM, [mem128].
11756 */
11757 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11758 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11759 IEM_MC_LOCAL(X86XMMREG, Dst);
11760 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11761 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11763
11764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11765 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11766 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11768 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11769 IEM_MC_PREPARE_SSE_USAGE();
11770
11771 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11772 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11773 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11774 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11775
11776 IEM_MC_ADVANCE_RIP_AND_FINISH();
11777 IEM_MC_END();
11778 }
11779}
11780
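/*
 * The Ib operand selects the compare predicate.  Plain SSE defines the low
 * three bits (0=eq, 1=lt, 2=le, 3=unord, 4=neq, 5=nlt, 6=nle, 7=ord) and
 * each lane yields an all-ones or all-zeroes mask; roughly:
 *
 *      for (unsigned i = 0; i < 4; i++)
 *          puDst->au32[i] = compare(uSrc1.ar32[i], uSrc2.ar32[i], bImm & 7)
 *                         ? UINT32_MAX : 0;
 *
 * (Informal sketch with a made-up compare(); iemAImpl_cmpps_u128 is the
 * real implementation.)  The pd/ss/sd variants below do the same on two
 * double lanes or on just the low scalar lane.
 */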
11781
11782/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11783FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11784{
11785 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11786
11787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11788 if (IEM_IS_MODRM_REG_MODE(bRm))
11789 {
11790 /*
11791 * XMM, XMM.
11792 */
11793 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11794 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11796 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11797 IEM_MC_LOCAL(X86XMMREG, Dst);
11798 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11799 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11800 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11801 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11802 IEM_MC_PREPARE_SSE_USAGE();
11803 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11804 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11805 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11806 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11807
11808 IEM_MC_ADVANCE_RIP_AND_FINISH();
11809 IEM_MC_END();
11810 }
11811 else
11812 {
11813 /*
11814 * XMM, [mem128].
11815 */
11816 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11817 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11818 IEM_MC_LOCAL(X86XMMREG, Dst);
11819 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11820 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11822
11823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11824 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11825 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11828 IEM_MC_PREPARE_SSE_USAGE();
11829
11830 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11832 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11833 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11834
11835 IEM_MC_ADVANCE_RIP_AND_FINISH();
11836 IEM_MC_END();
11837 }
11838}
11839
11840
11841/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11842FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11843{
11844 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11845
11846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11847 if (IEM_IS_MODRM_REG_MODE(bRm))
11848 {
11849 /*
11850 * XMM32, XMM32.
11851 */
11852 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11853 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11855 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11856 IEM_MC_LOCAL(X86XMMREG, Dst);
11857 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11858 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11859 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11860 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11861 IEM_MC_PREPARE_SSE_USAGE();
11862 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11863 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11864 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11865 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11866
11867 IEM_MC_ADVANCE_RIP_AND_FINISH();
11868 IEM_MC_END();
11869 }
11870 else
11871 {
11872 /*
11873 * XMM32, [mem32].
11874 */
11875 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11876 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11877 IEM_MC_LOCAL(X86XMMREG, Dst);
11878 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11879 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11881
11882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11883 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11884 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11886 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11887 IEM_MC_PREPARE_SSE_USAGE();
11888
11889 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11890 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11891 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11892 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11893 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11894
11895 IEM_MC_ADVANCE_RIP_AND_FINISH();
11896 IEM_MC_END();
11897 }
11898}
11899
11900
11901/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11902FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11903{
11904 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11905
11906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11907 if (IEM_IS_MODRM_REG_MODE(bRm))
11908 {
11909 /*
11910 * XMM64, XMM64.
11911 */
11912 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11913 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11915 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11916 IEM_MC_LOCAL(X86XMMREG, Dst);
11917 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11918 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11919 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11920 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11921 IEM_MC_PREPARE_SSE_USAGE();
11922 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11923 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11924 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11925 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11926
11927 IEM_MC_ADVANCE_RIP_AND_FINISH();
11928 IEM_MC_END();
11929 }
11930 else
11931 {
11932 /*
11933 * XMM64, [mem64].
11934 */
11935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11936 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11937 IEM_MC_LOCAL(X86XMMREG, Dst);
11938 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11939 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11941
11942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11943 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11944 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11947 IEM_MC_PREPARE_SSE_USAGE();
11948
11949 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11950 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11951 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11952 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11953 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11954
11955 IEM_MC_ADVANCE_RIP_AND_FINISH();
11956 IEM_MC_END();
11957 }
11958}
11959
11960
11961/** Opcode 0x0f 0xc3. */
11962FNIEMOP_DEF(iemOp_movnti_My_Gy)
11963{
11964 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11965
11966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11967
11968 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11969 if (IEM_IS_MODRM_MEM_MODE(bRm))
11970 {
11971 switch (pVCpu->iem.s.enmEffOpSize)
11972 {
11973 case IEMMODE_32BIT:
11974 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11975 IEM_MC_LOCAL(uint32_t, u32Value);
11976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11977
11978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11980
11981 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11982 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11983 IEM_MC_ADVANCE_RIP_AND_FINISH();
11984 IEM_MC_END();
11985 break;
11986
11987 case IEMMODE_64BIT:
11988 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11989 IEM_MC_LOCAL(uint64_t, u64Value);
11990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11991
11992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11994
11995 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11996 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11997 IEM_MC_ADVANCE_RIP_AND_FINISH();
11998 IEM_MC_END();
11999 break;
12000
12001 case IEMMODE_16BIT:
12002 /** @todo check this form. */
12003 IEMOP_RAISE_INVALID_OPCODE_RET();
12004
12005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12006 }
12007 }
12008 else
12009 IEMOP_RAISE_INVALID_OPCODE_RET();
12010}
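
/*
 * Sidebar (illustrative): MOVNTI is architecturally just a GPR store with a
 * non-temporal cache hint, so the emulation above reuses the plain
 * IEM_MC_STORE_MEM_U32/U64 path and the hint is simply dropped.  The
 * observable effect is equivalent to:
 *
 *      *(uint32_t *)pvDst = u32Value;   // 32-bit operand size
 *      *(uint64_t *)pvDst = u64Value;   // 64-bit operand size (REX.W)
 */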
12011
12012
12013/* Opcode 0x66 0x0f 0xc3 - invalid */
12014/* Opcode 0xf3 0x0f 0xc3 - invalid */
12015/* Opcode 0xf2 0x0f 0xc3 - invalid */
12016
12017
12018/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12019FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12020{
12021 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12023 if (IEM_IS_MODRM_REG_MODE(bRm))
12024 {
12025 /*
12026 * Register, register.
12027 */
12028 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12029 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12030 IEM_MC_LOCAL(uint16_t, uValue);
12031
12032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12033 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12034 IEM_MC_PREPARE_FPU_USAGE();
12035 IEM_MC_FPU_TO_MMX_MODE();
12036
12037 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12038 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12039
12040 IEM_MC_ADVANCE_RIP_AND_FINISH();
12041 IEM_MC_END();
12042 }
12043 else
12044 {
12045 /*
12046 * Register, memory.
12047 */
12048 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12050 IEM_MC_LOCAL(uint16_t, uValue);
12051
12052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12053 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12055 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12056 IEM_MC_PREPARE_FPU_USAGE();
12057
12058 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12059 IEM_MC_FPU_TO_MMX_MODE();
12060 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12061
12062 IEM_MC_ADVANCE_RIP_AND_FINISH();
12063 IEM_MC_END();
12064 }
12065}
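
/*
 * Sidebar (illustrative): the word-insert semantics implemented above, as a
 * plain-C model with hypothetical stand-in names:
 *
 *      static void pinsrw_mmx_model(uint16_t auDst[4], uint16_t uValue, uint8_t bImm)
 *      {
 *          auDst[bImm & 3] = uValue;   // only the selected word changes
 *      }
 *
 * The SSE form below is identical except the lane index is bImm & 7.
 */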
12066
12067
12068/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12069FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12070{
12071 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12073 if (IEM_IS_MODRM_REG_MODE(bRm))
12074 {
12075 /*
12076 * Register, register.
12077 */
12078 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12079 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12081
12082 IEM_MC_LOCAL(uint16_t, uValue);
12083 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12084 IEM_MC_PREPARE_SSE_USAGE();
12085
12086 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12087 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12088 IEM_MC_ADVANCE_RIP_AND_FINISH();
12089 IEM_MC_END();
12090 }
12091 else
12092 {
12093 /*
12094 * Register, memory.
12095 */
12096 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12098 IEM_MC_LOCAL(uint16_t, uValue);
12099
12100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12101 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12103 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12104 IEM_MC_PREPARE_SSE_USAGE();
12105
12106 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12107 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12108 IEM_MC_ADVANCE_RIP_AND_FINISH();
12109 IEM_MC_END();
12110 }
12111}
12112
12113
12114/* Opcode 0xf3 0x0f 0xc4 - invalid */
12115/* Opcode 0xf2 0x0f 0xc4 - invalid */
12116
12117
12118/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12119FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12120{
12121 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12123 if (IEM_IS_MODRM_REG_MODE(bRm))
12124 {
12125 /*
12126 * Greg32, MMX, imm8.
12127 */
12128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12129 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12131 IEM_MC_LOCAL(uint16_t, uValue);
12132 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12133 IEM_MC_PREPARE_FPU_USAGE();
12134 IEM_MC_FPU_TO_MMX_MODE();
12135 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12136 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12137 IEM_MC_ADVANCE_RIP_AND_FINISH();
12138 IEM_MC_END();
12139 }
12140 /* No memory operand. */
12141 else
12142 IEMOP_RAISE_INVALID_OPCODE_RET();
12143}
12144
12145
12146/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12147FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12148{
12149 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12151 if (IEM_IS_MODRM_REG_MODE(bRm))
12152 {
12153 /*
12154 * Greg32, XMM, imm8.
12155 */
12156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12157 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12159 IEM_MC_LOCAL(uint16_t, uValue);
12160 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12161 IEM_MC_PREPARE_SSE_USAGE();
12162 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12163 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12164 IEM_MC_ADVANCE_RIP_AND_FINISH();
12165 IEM_MC_END();
12166 }
12167 /* No memory operand. */
12168 else
12169 IEMOP_RAISE_INVALID_OPCODE_RET();
12170}
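
/*
 * Sidebar (illustrative): PEXTRW is the inverse selection - it pulls one
 * 16-bit lane out and zero-extends it into the 32-bit GPR.  A plain-C model
 * of the SSE form above (hypothetical names):
 *
 *      static uint32_t pextrw_model(uint16_t const auSrc[8], uint8_t bImm)
 *      {
 *          return auSrc[bImm & 7];     // zero-extended to 32 bits
 *      }
 *
 * The MMX form works the same way with bImm & 3.
 */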
12171
12172
12173/* Opcode 0xf3 0x0f 0xc5 - invalid */
12174/* Opcode 0xf2 0x0f 0xc5 - invalid */
12175
12176
12177/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12178FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12179{
12180 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12182 if (IEM_IS_MODRM_REG_MODE(bRm))
12183 {
12184 /*
12185 * XMM, XMM, imm8.
12186 */
12187 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12188 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12190 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12191 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12192 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12193 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12194 IEM_MC_PREPARE_SSE_USAGE();
12195 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12196 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12197 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12198 IEM_MC_ADVANCE_RIP_AND_FINISH();
12199 IEM_MC_END();
12200 }
12201 else
12202 {
12203 /*
12204 * XMM, [mem128], imm8.
12205 */
12206 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12207 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12208 IEM_MC_LOCAL(RTUINT128U, uSrc);
12209 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12211
12212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12213 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12214 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12216 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12217 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12218
12219 IEM_MC_PREPARE_SSE_USAGE();
12220 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12222
12223 IEM_MC_ADVANCE_RIP_AND_FINISH();
12224 IEM_MC_END();
12225 }
12226}
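
/*
 * Sidebar (illustrative): the lane selection iemAImpl_shufps_u128 performs,
 * as a plain-C model with hypothetical stand-in types.  Note that the two
 * low result lanes pick from the (old) destination and the two high lanes
 * from the source:
 *
 *      static void shufps_model(float arDst[4], float const arSrc[4], uint8_t bImm)
 *      {
 *          float const arOld[4] = { arDst[0], arDst[1], arDst[2], arDst[3] };
 *          arDst[0] = arOld[ bImm       & 3];
 *          arDst[1] = arOld[(bImm >> 2) & 3];
 *          arDst[2] = arSrc[(bImm >> 4) & 3];
 *          arDst[3] = arSrc[(bImm >> 6) & 3];
 *      }
 */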
12227
12228
12229/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12230FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12231{
12232 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12234 if (IEM_IS_MODRM_REG_MODE(bRm))
12235 {
12236 /*
12237 * XMM, XMM, imm8.
12238 */
12239 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12240 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12242 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12243 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12244 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12245 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12246 IEM_MC_PREPARE_SSE_USAGE();
12247 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12248 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12249 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12250 IEM_MC_ADVANCE_RIP_AND_FINISH();
12251 IEM_MC_END();
12252 }
12253 else
12254 {
12255 /*
12256 * XMM, [mem128], imm8.
12257 */
12258 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12259 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12260 IEM_MC_LOCAL(RTUINT128U, uSrc);
12261 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12263
12264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12265 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12266 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12268 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12269 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12270
12271 IEM_MC_PREPARE_SSE_USAGE();
12272 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12273 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12274
12275 IEM_MC_ADVANCE_RIP_AND_FINISH();
12276 IEM_MC_END();
12277 }
12278}
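
/*
 * Sidebar (illustrative): SHUFPD only has two qword lanes per operand, so
 * just two imm8 bits participate (plain-C model, hypothetical names):
 *
 *      static void shufpd_model(double ardDst[2], double const ardSrc[2], uint8_t bImm)
 *      {
 *          ardDst[0] = ardDst[bImm & 1];          // from old destination
 *          ardDst[1] = ardSrc[(bImm >> 1) & 1];   // from source
 *      }
 */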
12279
12280
12281/* Opcode 0xf3 0x0f 0xc6 - invalid */
12282/* Opcode 0xf2 0x0f 0xc6 - invalid */
12283
12284
12285/**
12286 * @opmaps grp9
12287 * @opcode /1
12288 * @opcodesub !11 mr/reg rex.w=0
12289 * @oppfx n/a
12290 * @opflmodify zf
12291 */
12292FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12293{
12294 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12295#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12296 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12299 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12300 \
12301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12302 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12303 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12304 \
12305 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12306 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12307 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12308 \
12309 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12310 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12311 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12312 \
12313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12314 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12315 \
12316 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12317 IEM_MC_COMMIT_EFLAGS(EFlags); \
12318 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12319 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12320 } IEM_MC_ENDIF(); \
12321 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12322 \
12323 IEM_MC_END()
12324 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12325 {
12326 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12327 }
12328 else
12329 {
12330 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12331 }
12332}
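
/*
 * Sidebar (illustrative): the architectural semantics the worker functions
 * implement, as a plain-C model (memory mapping and LOCK atomicity omitted;
 * the function name and pEFlags shape are stand-ins):
 *
 *      static void cmpxchg8b_model(uint64_t *pu64Mem, RTUINT64U *pu64EaxEdx,
 *                                  RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
 *      {
 *          if (*pu64Mem == pu64EaxEdx->u)
 *          {
 *              *pu64Mem   = pu64EbxEcx->u;     // store ECX:EBX
 *              *pfEFlags |= X86_EFL_ZF;
 *          }
 *          else
 *          {
 *              pu64EaxEdx->u = *pu64Mem;       // load into EDX:EAX
 *              *pfEFlags    &= ~X86_EFL_ZF;
 *          }
 *      }
 *
 * This is why the MC block above only writes EDX:EAX back when ZF is clear.
 */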
12333
12334
12335/**
12336 * @opmaps grp9
12337 * @opcode /1
12338 * @opcodesub !11 mr/reg rex.w=1
12339 * @oppfx n/a
12340 * @opflmodify zf
12341 */
12342FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12343{
12344 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12345 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12346 {
12347 /*
12348 * This is hairy, very hairy macro fun. We're walking a fine line
12349 * here to make the code parsable by IEMAllInstPython.py and fit into
12350 * the patterns IEMAllThrdPython.py requires for the code morphing.
12351 */
12352#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12353 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12356 IEMOP_HLP_DONE_DECODING(); \
12357 \
12358 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12359 bUnmapInfoStmt; \
12360 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12361 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12362 \
12363 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12364 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12365 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12366 \
12367 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12368 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12369 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12370 \
12371 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12372
12373#define BODY_CMPXCHG16B_TAIL(a_Type) \
12374 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12375 IEM_MC_COMMIT_EFLAGS(EFlags); \
12376 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12377 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12378 } IEM_MC_ENDIF(); \
12379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12380 IEM_MC_END()
12381
12382#ifdef RT_ARCH_AMD64
12383 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12384 {
12385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12386 {
12387 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12389 BODY_CMPXCHG16B_TAIL(RW);
12390 }
12391 else
12392 {
12393 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12395 BODY_CMPXCHG16B_TAIL(ATOMIC);
12396 }
12397 }
12398 else
12399 { /* (see comments in #else case below) */
12400 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12401 {
12402 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12403 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12404 BODY_CMPXCHG16B_TAIL(RW);
12405 }
12406 else
12407 {
12408 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12409 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12410 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12411 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12412 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12413 pEFlags, bUnmapInfo);
12414 IEM_MC_END();
12415 }
12416 }
12417
12418#elif defined(RT_ARCH_ARM64)
12419 /** @todo may require fallback for unaligned accesses... */
12420 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12421 {
12422 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12423 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12424 BODY_CMPXCHG16B_TAIL(RW);
12425 }
12426 else
12427 {
12428 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12429 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12430 BODY_CMPXCHG16B_TAIL(ATOMIC);
12431 }
12432
12433#else
12434 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12435 accesses and not at all atomic, which works fine in a uni-CPU guest
12436 configuration (ignoring DMA). If guest SMP is active we have no choice
12437 but to use a rendezvous callback here. Sigh. */
12438 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12439 {
12440 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12441 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12442 BODY_CMPXCHG16B_TAIL(RW);
12443 }
12444 else
12445 {
12446 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12447 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12448 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12450 iemCImpl_cmpxchg16b_fallback_rendezvous,
12451 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12452 IEM_MC_END();
12453 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12454 }
12455#endif
12456
12457#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12458 }
12459 Log(("cmpxchg16b -> #UD\n"));
12460 IEMOP_RAISE_INVALID_OPCODE_RET();
12461}
12462
12463FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12464{
12465 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12466 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12467 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12468}
12469
12470
12471/** Opcode 0x0f 0xc7 11/6. */
12472FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12473{
12474 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12475 IEMOP_RAISE_INVALID_OPCODE_RET();
12476
12477 if (IEM_IS_MODRM_REG_MODE(bRm))
12478 {
12479 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12481 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12482 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12483 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12484 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12485 iemCImpl_rdrand, iReg, enmEffOpSize);
12486 IEM_MC_END();
12487 }
12488 /* Register only. */
12489 else
12490 IEMOP_RAISE_INVALID_OPCODE_RET();
12491}
12492
12493/** Opcode 0x0f 0xc7 !11/6. */
12494#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12495FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12496{
12497 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12498 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12499 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12500 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12501 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12503 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12504 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12505 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12506 IEM_MC_END();
12507}
12508#else
12509FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12510#endif
12511
12512/** Opcode 0x66 0x0f 0xc7 !11/6. */
12513#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12514FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12515{
12516 IEMOP_MNEMONIC(vmclear, "vmclear");
12517 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12518 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12519 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12520 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12522 IEMOP_HLP_DONE_DECODING();
12523 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12524 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12525 IEM_MC_END();
12526}
12527#else
12528FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12529#endif
12530
12531/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12532#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12533FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12534{
12535 IEMOP_MNEMONIC(vmxon, "vmxon");
12536 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12537 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12538 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12540 IEMOP_HLP_DONE_DECODING();
12541 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12542 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12543 IEM_MC_END();
12544}
12545#else
12546FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12547#endif
12548
12549/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12550#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12551FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12552{
12553 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12554 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12555 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12557 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12559 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12560 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12561 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12562 IEM_MC_END();
12563}
12564#else
12565FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12566#endif
12567
12568/** Opcode 0x0f 0xc7 11/7. */
12569FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12570{
12571 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12572 IEMOP_RAISE_INVALID_OPCODE_RET();
12573
12574 if (IEM_IS_MODRM_REG_MODE(bRm))
12575 {
12576 /* register destination. */
12577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12579 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12580 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12581 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12582 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12583 iemCImpl_rdseed, iReg, enmEffOpSize);
12584 IEM_MC_END();
12585 }
12586 /* Register only. */
12587 else
12588 IEMOP_RAISE_INVALID_OPCODE_RET();
12589}
12590
12591/**
12592 * Group 9 jump table for register variant.
12593 */
12594IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12595{ /* pfx: none, 066h, 0f3h, 0f2h */
12596 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12597 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12598 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12599 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12600 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12601 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12602 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12603 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12604};
12605AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12606
12607
12608/**
12609 * Group 9 jump table for memory variant.
12610 */
12611IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12612{ /* pfx: none, 066h, 0f3h, 0f2h */
12613 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12614 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12615 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12616 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12617 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12618 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12619 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12620 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12621};
12622AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12623
12624
12625/** Opcode 0x0f 0xc7. */
12626FNIEMOP_DEF(iemOp_Grp9)
12627{
12628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12629 if (IEM_IS_MODRM_REG_MODE(bRm))
12630 /* register, register */
12631 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12632 + pVCpu->iem.s.idxPrefix], bRm);
12633 /* memory, register */
12634 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12635 + pVCpu->iem.s.idxPrefix], bRm);
12636}
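
/*
 * Sidebar (illustrative): each table row holds the four mandatory-prefix
 * columns for one /r value, so the lookups above compute (assuming the
 * usual idxPrefix encoding none=0, 066h=1, 0f3h=2, 0f2h=3, matching the
 * column order in the table comments):
 *
 *      idx = IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix;  // 0..31
 */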
12637
12638
12639/**
12640 * Common 'bswap register' helper.
12641 */
12642FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12643{
12644 switch (pVCpu->iem.s.enmEffOpSize)
12645 {
12646 case IEMMODE_16BIT:
12647 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12649 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12650 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12651 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12652 IEM_MC_ADVANCE_RIP_AND_FINISH();
12653 IEM_MC_END();
12654 break;
12655
12656 case IEMMODE_32BIT:
12657 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12659 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12660 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12661 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12662 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12663 IEM_MC_ADVANCE_RIP_AND_FINISH();
12664 IEM_MC_END();
12665 break;
12666
12667 case IEMMODE_64BIT:
12668 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12670 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12671 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12672 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12673 IEM_MC_ADVANCE_RIP_AND_FINISH();
12674 IEM_MC_END();
12675 break;
12676
12677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12678 }
12679}
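
/*
 * Sidebar (illustrative): the 32-bit worker simply reverses the byte order,
 * i.e. something along these lines (hypothetical model, not the real
 * iemAImpl_bswap_u32):
 *
 *      static uint32_t bswap_u32_model(uint32_t u32)
 *      {
 *          return  (u32 >> 24)
 *             | ((u32 >>  8) & UINT32_C(0x0000ff00))
 *             | ((u32 <<  8) & UINT32_C(0x00ff0000))
 *             |  (u32 << 24);
 *      }
 *
 * The 16-bit operand form is documented as undefined by both vendors, which
 * is why the 16-bit case above hands a 32-bit register reference to a
 * dedicated iemAImpl_bswap_u16 worker instead of reusing the 32-bit one.
 */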
12680
12681
12682/** Opcode 0x0f 0xc8. */
12683FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12684{
12685 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12686 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12687 prefix. It appears REX.B is actually the correct prefix. For a parallel
12688 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12689 IEMOP_HLP_MIN_486();
12690 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12691}
12692
12693
12694/** Opcode 0x0f 0xc9. */
12695FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12696{
12697 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12698 IEMOP_HLP_MIN_486();
12699 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12700}
12701
12702
12703/** Opcode 0x0f 0xca. */
12704FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12705{
12706 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12707 IEMOP_HLP_MIN_486();
12708 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12709}
12710
12711
12712/** Opcode 0x0f 0xcb. */
12713FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12714{
12715 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12716 IEMOP_HLP_MIN_486();
12717 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12718}
12719
12720
12721/** Opcode 0x0f 0xcc. */
12722FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12723{
12724 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12725 IEMOP_HLP_MIN_486();
12726 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12727}
12728
12729
12730/** Opcode 0x0f 0xcd. */
12731FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12732{
12733 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12734 IEMOP_HLP_MIN_486();
12735 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12736}
12737
12738
12739/** Opcode 0x0f 0xce. */
12740FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12741{
12742 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12743 IEMOP_HLP_MIN_486();
12744 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12745}
12746
12747
12748/** Opcode 0x0f 0xcf. */
12749FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12750{
12751 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12752 IEMOP_HLP_MIN_486();
12753 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12754}
12755
12756
12757/* Opcode 0x0f 0xd0 - invalid */
12758
12759
12760/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12761FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12762{
12763 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12764 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12765}
12766
12767
12768/* Opcode 0xf3 0x0f 0xd0 - invalid */
12769
12770
12771/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12772FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12773{
12774 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12775 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12776}
12777
12778
12779
12780/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12781FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12782{
12783 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12784 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12785}
12786
12787/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12788FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12789{
12790 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12791 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12792}
12793
12794/* Opcode 0xf3 0x0f 0xd1 - invalid */
12795/* Opcode 0xf2 0x0f 0xd1 - invalid */
12796
12797/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12798FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12799{
12800 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12801 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12802}
12803
12804
12805/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12806FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12807{
12808 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12809 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12810}
12811
12812
12813/* Opcode 0xf3 0x0f 0xd2 - invalid */
12814/* Opcode 0xf2 0x0f 0xd2 - invalid */
12815
12816/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12817FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12818{
12819 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12820 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12821}
12822
12823
12824/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12825FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12826{
12827 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12828 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12829}
12830
12831
12832/* Opcode 0xf3 0x0f 0xd3 - invalid */
12833/* Opcode 0xf2 0x0f 0xd3 - invalid */
12834
12835
12836/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12837FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12838{
12839 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12840 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12841}
12842
12843
12844/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12845FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12846{
12847 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12848 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12849}
12850
12851
12852/* Opcode 0xf3 0x0f 0xd4 - invalid */
12853/* Opcode 0xf2 0x0f 0xd4 - invalid */
12854
12855/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12856FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12857{
12858 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12859 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12860}
12861
12862/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12863FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12864{
12865 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12866 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmullw_u128);
12867}
12868
12869
12870/* Opcode 0xf3 0x0f 0xd5 - invalid */
12871/* Opcode 0xf2 0x0f 0xd5 - invalid */
12872
12873/* Opcode 0x0f 0xd6 - invalid */
12874
12875/**
12876 * @opcode 0xd6
12877 * @oppfx 0x66
12878 * @opcpuid sse2
12879 * @opgroup og_sse2_pcksclr_datamove
12880 * @opxcpttype none
12881 * @optest op1=-1 op2=2 -> op1=2
12882 * @optest op1=0 op2=-42 -> op1=-42
12883 */
12884FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12885{
12886 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12888 if (IEM_IS_MODRM_REG_MODE(bRm))
12889 {
12890 /*
12891 * Register, register.
12892 */
12893 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12895 IEM_MC_LOCAL(uint64_t, uSrc);
12896
12897 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12898 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12899
12900 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12901 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12902
12903 IEM_MC_ADVANCE_RIP_AND_FINISH();
12904 IEM_MC_END();
12905 }
12906 else
12907 {
12908 /*
12909 * Memory, register.
12910 */
12911 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12912 IEM_MC_LOCAL(uint64_t, uSrc);
12913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12914
12915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12917 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12918 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12919
12920 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12921 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12922
12923 IEM_MC_ADVANCE_RIP_AND_FINISH();
12924 IEM_MC_END();
12925 }
12926}
12927
12928
12929/**
12930 * @opcode 0xd6
12931 * @opcodesub 11 mr/reg
12932 * @oppfx f3
12933 * @opcpuid sse2
12934 * @opgroup og_sse2_simdint_datamove
12935 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12936 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12937 */
12938FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12939{
12940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12941 if (IEM_IS_MODRM_REG_MODE(bRm))
12942 {
12943 /*
12944 * Register, register.
12945 */
12946 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12947 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12949 IEM_MC_LOCAL(uint64_t, uSrc);
12950
12951 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12952 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12953 IEM_MC_FPU_TO_MMX_MODE();
12954
12955 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12956 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12957
12958 IEM_MC_ADVANCE_RIP_AND_FINISH();
12959 IEM_MC_END();
12960 }
12961
12962 /**
12963 * @opdone
12964 * @opmnemonic udf30fd6mem
12965 * @opcode 0xd6
12966 * @opcodesub !11 mr/reg
12967 * @oppfx f3
12968 * @opunused intel-modrm
12969 * @opcpuid sse
12970 * @optest ->
12971 */
12972 else
12973 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12974}
12975
12976
12977/**
12978 * @opcode 0xd6
12979 * @opcodesub 11 mr/reg
12980 * @oppfx f2
12981 * @opcpuid sse2
12982 * @opgroup og_sse2_simdint_datamove
12983 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12984 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12985 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12986 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12987 * @optest op1=-42 op2=0xfedcba9876543210
12988 * -> op1=0xfedcba9876543210 ftw=0xff
12989 */
12990FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12991{
12992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12993 if (IEM_IS_MODRM_REG_MODE(bRm))
12994 {
12995 /*
12996 * Register, register.
12997 */
12998 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12999 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13001 IEM_MC_LOCAL(uint64_t, uSrc);
13002
13003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13004 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13005 IEM_MC_FPU_TO_MMX_MODE();
13006
13007 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13008 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13009
13010 IEM_MC_ADVANCE_RIP_AND_FINISH();
13011 IEM_MC_END();
13012 }
13013
13014 /**
13015 * @opdone
13016 * @opmnemonic udf20fd6mem
13017 * @opcode 0xd6
13018 * @opcodesub !11 mr/reg
13019 * @oppfx f2
13020 * @opunused intel-modrm
13021 * @opcpuid sse
13022 * @optest ->
13023 */
13024 else
13025 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13026}
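
/*
 * Sidebar (illustrative): MOVQ2DQ and MOVDQ2Q just shuttle a qword between
 * the MMX and XMM register files; per the MC blocks above the net effect is:
 *
 *      uXmm.au64[0] = uMm;             // movq2dq: low qword from the MMX reg,
 *      uXmm.au64[1] = 0;               //          high qword zeroed
 *      uMm          = uXmm.au64[0];    // movdq2q: low qword only
 *
 * Both directions put the FPU in MMX mode (FTW=0xff), as the @optest lines
 * document.
 */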
13027
13028
13029/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13030FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13031{
13032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13033 /* Docs say register only. */
13034 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13035 {
13036 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13037 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13040 IEM_MC_ARG(uint64_t *, puDst, 0);
13041 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13042 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13043 IEM_MC_PREPARE_FPU_USAGE();
13044 IEM_MC_FPU_TO_MMX_MODE();
13045
13046 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13047 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13048 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13049
13050 IEM_MC_ADVANCE_RIP_AND_FINISH();
13051 IEM_MC_END();
13052 }
13053 else
13054 IEMOP_RAISE_INVALID_OPCODE_RET();
13055}
13056
13057
13058 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13059FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13060{
13061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13062 /* Docs say register only. */
13063 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13064 {
13065 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13066 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13067 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13069 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13070 IEM_MC_PREPARE_SSE_USAGE();
13071 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13072 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13073 } IEM_MC_NATIVE_ELSE() {
13074 IEM_MC_ARG(uint64_t *, puDst, 0);
13075 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13076 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13077 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13078 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13079 } IEM_MC_NATIVE_ENDIF();
13080 IEM_MC_ADVANCE_RIP_AND_FINISH();
13081 IEM_MC_END();
13082 }
13083 else
13084 IEMOP_RAISE_INVALID_OPCODE_RET();
13085}
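
/*
 * Sidebar (illustrative): PMOVMSKB gathers the sign bit of every byte into
 * the low bits of the destination GPR and zeroes the rest.  A plain-C model
 * covering both the MMX and SSE forms (hypothetical names):
 *
 *      static uint64_t pmovmskb_model(uint8_t const *pabSrc, unsigned cbSrc)
 *      {
 *          uint64_t fMask = 0;
 *          for (unsigned i = 0; i < cbSrc; i++)   // cbSrc = 8 (MMX) or 16 (SSE)
 *              fMask |= (uint64_t)(pabSrc[i] >> 7) << i;
 *          return fMask;
 *      }
 */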
13086
13087
13088/* Opcode 0xf3 0x0f 0xd7 - invalid */
13089/* Opcode 0xf2 0x0f 0xd7 - invalid */
13090
13091
13092/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13093FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13094{
13095 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13096 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13097}
13098
13099
13100/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13101FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13102{
13103 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13104 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13105}
13106
13107
13108/* Opcode 0xf3 0x0f 0xd8 - invalid */
13109/* Opcode 0xf2 0x0f 0xd8 - invalid */
13110
13111/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13112FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13113{
13114 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13115 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13116}
13117
13118
13119/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13120FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13121{
13122 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13123 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13124}
13125
13126
13127/* Opcode 0xf3 0x0f 0xd9 - invalid */
13128/* Opcode 0xf2 0x0f 0xd9 - invalid */
13129
13130/** Opcode 0x0f 0xda - pminub Pq, Qq */
13131FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13132{
13133 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13134 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13135}
13136
13137
13138/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13139FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13140{
13141 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13142 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13143}
13144
13145/* Opcode 0xf3 0x0f 0xda - invalid */
13146/* Opcode 0xf2 0x0f 0xda - invalid */
13147
13148/** Opcode 0x0f 0xdb - pand Pq, Qq */
13149FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13150{
13151 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13152 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13153}
13154
13155
13156/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13157FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13158{
13159 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13160 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13161}
13162
13163
13164/* Opcode 0xf3 0x0f 0xdb - invalid */
13165/* Opcode 0xf2 0x0f 0xdb - invalid */
13166
13167/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13168FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13169{
13170 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13171 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13172}
13173
13174
13175/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13176FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13177{
13178 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13179 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusb_u128);
13180}
13181
13182
13183/* Opcode 0xf3 0x0f 0xdc - invalid */
13184/* Opcode 0xf2 0x0f 0xdc - invalid */
13185
13186/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13187FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13188{
13189 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13190 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13191}
13192
13193
13194/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13195FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13196{
13197 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13198 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusw_u128);
13199}
13200
13201
13202/* Opcode 0xf3 0x0f 0xdd - invalid */
13203/* Opcode 0xf2 0x0f 0xdd - invalid */
13204
13205/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13206FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13207{
13208 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13209 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13210}
13211
13212
13213 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13214FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13215{
13216 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13217 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13218}
13219
13220/* Opcode 0xf3 0x0f 0xde - invalid */
13221/* Opcode 0xf2 0x0f 0xde - invalid */
13222
13223
13224/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13225FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13226{
13227 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13228 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13229}
13230
13231
13232/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13233FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13234{
13235 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13236 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13237}
13238
13239
13240/* Opcode 0xf3 0x0f 0xdf - invalid */
13241/* Opcode 0xf2 0x0f 0xdf - invalid */
13242
13243/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13244FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13245{
13246 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13247 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13248}
13249
13250
13251/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13252FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13253{
13254 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13255 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13256}
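
/*
 * Sidebar (illustrative): the byte averaging is unsigned with rounding up,
 * i.e. per element:
 *
 *      uResult = (uint8_t)(((unsigned)uDst + uSrc + 1) >> 1);
 *
 * PAVGW (0x0f 0xe3) below does the same on 16-bit lanes.
 */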
13257
13258
13259/* Opcode 0xf3 0x0f 0xe0 - invalid */
13260/* Opcode 0xf2 0x0f 0xe0 - invalid */
13261
13262/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13263FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13264{
13265 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13266 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13267}
13268
13269
13270/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13271FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13272{
13273 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13274 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13275}
13276
13277
13278/* Opcode 0xf3 0x0f 0xe1 - invalid */
13279/* Opcode 0xf2 0x0f 0xe1 - invalid */
13280
13281/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13282FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13283{
13284 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13285 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13286}
13287
13288
13289/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13290FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13291{
13292 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13293 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13294}
13295
13296
13297/* Opcode 0xf3 0x0f 0xe2 - invalid */
13298/* Opcode 0xf2 0x0f 0xe2 - invalid */
13299
13300/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13301FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13302{
13303 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13304 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13305}
13306
13307
13308/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13309FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13310{
13311 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13312 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13313}
13314
13315
13316/* Opcode 0xf3 0x0f 0xe3 - invalid */
13317/* Opcode 0xf2 0x0f 0xe3 - invalid */
13318
13319/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13320FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13321{
13322 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13323 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13324}
13325
13326
13327/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13328FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13329{
13330 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13331 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13332}
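
/*
 * Sidebar (illustrative): PMULHUW keeps the high half of the 16x16 unsigned
 * product per lane, while PMULHW (0x0f 0xe5) below is the signed
 * counterpart.  Per-lane, using stand-in variable names:
 *
 *      uResult = (uint16_t)(((uint32_t)uDst * uSrc) >> 16);                    // pmulhuw
 *      iResult = (uint16_t)(((int32_t)(int16_t)iDst * (int16_t)iSrc) >> 16);   // pmulhw
 */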
13333
13334
13335/* Opcode 0xf3 0x0f 0xe4 - invalid */
13336/* Opcode 0xf2 0x0f 0xe4 - invalid */
13337
13338/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13339FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13340{
13341 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13342 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13343}
13344
13345
13346/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13347FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13348{
13349 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13350 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13351}
13352
13353
13354/* Opcode 0xf3 0x0f 0xe5 - invalid */
13355/* Opcode 0xf2 0x0f 0xe5 - invalid */
13356/* Opcode 0x0f 0xe6 - invalid */
13357
13358
13359/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13360FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13361{
13362 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13363 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13364}
13365
13366
13367/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13368FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13369{
13370 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13371 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13372}
13373
13374
13375/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13376FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13377{
13378 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13379 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13380}
13381
13382
13383/**
13384 * @opcode 0xe7
13385 * @opcodesub !11 mr/reg
13386 * @oppfx none
13387 * @opcpuid sse
13388 * @opgroup og_sse1_cachect
13389 * @opxcpttype none
13390 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13391 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13392 */
13393FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13394{
13395 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13397 if (IEM_IS_MODRM_MEM_MODE(bRm))
13398 {
13399 /* Register, memory. */
13400 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13401 IEM_MC_LOCAL(uint64_t, uSrc);
13402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13403
13404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13406 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13407 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13408 IEM_MC_FPU_TO_MMX_MODE();
13409
13410 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13411 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13412
13413 IEM_MC_ADVANCE_RIP_AND_FINISH();
13414 IEM_MC_END();
13415 }
13416 /**
13417 * @opdone
13418 * @opmnemonic ud0fe7reg
13419 * @opcode 0xe7
13420 * @opcodesub 11 mr/reg
13421 * @oppfx none
13422 * @opunused immediate
13423 * @opcpuid sse
13424 * @optest ->
13425 */
13426 else
13427 IEMOP_RAISE_INVALID_OPCODE_RET();
13428}
13429
13430/**
13431 * @opcode 0xe7
13432 * @opcodesub !11 mr/reg
13433 * @oppfx 0x66
13434 * @opcpuid sse2
13435 * @opgroup og_sse2_cachect
13436 * @opxcpttype 1
13437 * @optest op1=-1 op2=2 -> op1=2
13438 * @optest op1=0 op2=-42 -> op1=-42
13439 */
13440FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13441{
13442 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13444 if (IEM_IS_MODRM_MEM_MODE(bRm))
13445 {
13446 /* Register, memory. */
13447 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13448 IEM_MC_LOCAL(RTUINT128U, uSrc);
13449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13450
13451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13453 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13455
13456 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13457 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13458
13459 IEM_MC_ADVANCE_RIP_AND_FINISH();
13460 IEM_MC_END();
13461 }
13462
13463 /**
13464 * @opdone
13465 * @opmnemonic ud660fe7reg
13466 * @opcode 0xe7
13467 * @opcodesub 11 mr/reg
13468 * @oppfx 0x66
13469 * @opunused immediate
13470 * @opcpuid sse
13471 * @optest ->
13472 */
13473 else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
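
/* Note: Unlike the iemOpCommonSse2Opt_FullFull_To_Full calls above, the 0x66
   variant of pxor expands the SSE2_OPT_BODY_FullFull_To_Full macro directly.
   Going by the names alone - an assumption, see the macro definition for the
   authoritative semantics - the expansion should be functionally equivalent
   to:
   @code
        return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
   @endcode
   with the two RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64 masks indicating the
   host architectures that get an optimized emitter for the operation. */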


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
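
/* Note: The IEM_MC_FETCH_MEM_U128_NO_AC fetch above reflects LDDQU's defining
   property: a 16-byte load with no alignment restriction (no alignment fault
   on a misaligned operand, unlike MOVDQA).  As a plain C sketch of the
   guest-visible load semantics - illustrative only, the names are made up
   and this is not IEM code:
   @code
        void lddquRef(RTUINT128U *puDst, uint8_t const *pbSrc) // pbSrc: guest memory operand
        {
            memcpy(puDst, pbSrc, 16);   // byte-wise copy, no alignment assumed
        }
   @endcode
 */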


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(           uint64_t,           u64EffAddr);
        IEM_MC_LOCAL(           uint64_t,           u64Mem);
        IEM_MC_ARG_LOCAL_REF(   uint64_t *,         pu64Mem, u64Mem, 0);
        IEM_MC_ARG(             uint64_t const *,   puSrc, 1);
        IEM_MC_ARG(             uint64_t const *,   puMsk, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
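
/* Semantics sketch (illustrative only, not the actual iemAImpl_maskmovq_u64
   worker): the most significant bit of each mask byte selects whether the
   corresponding source byte is stored at the implicit [ ER]DI address:
   @code
        void maskmovqRef(uint8_t *pbDst, uint8_t const *pbSrc, uint8_t const *pbMsk)
        {
            for (unsigned i = 0; i < 8; i++)
                if (pbMsk[i] & 0x80)    // bit 7 of the mask byte enables the store
                    pbDst[i] = pbSrc[i];
        }
   @endcode
   The emulation above instead fetches the whole quadword, lets the worker
   merge the selected bytes into it, and stores the whole quadword back, so
   unselected bytes are rewritten with the values just read. */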


/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* MASKMOVDQU is SSE2; the SSE/AMD-MMX-ext gating only applies to MASKMOVQ above. */
        IEM_MC_LOCAL(           uint64_t,           u64EffAddr);
        IEM_MC_LOCAL(           RTUINT128U,         u128Mem);
        IEM_MC_ARG_LOCAL_REF(   PRTUINT128U,        pu128Mem, u128Mem, 0);
        IEM_MC_ARG(             PCRTUINT128U,       puSrc, 1);
        IEM_MC_ARG(             PCRTUINT128U,       puMsk, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
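
/* This is the 16-byte analogue of maskmovq above: bit 7 of each of the 16
   mask bytes selects whether the corresponding source byte is written, and
   the emulation again uses a fetch/merge/store of the full 16 bytes at the
   implicit [ ER]DI address (see iemAImpl_maskmovdqu_u128 for the worker). */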


/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}
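
/* Note: PSUBQ, including this MMX-register form, was introduced with SSE2,
   which is why the SSE2-gated MMX worker is used above. */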


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
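
/* Note on the vendor check above: Intel CPUs decode a ModR/M byte (plus any
   SIB and displacement bytes) for UD0 before raising \#UD, whereas AMD CPUs
   treat the instruction as just the two opcode bytes 0F FF.  The decoded
   instruction length therefore differs by vendor, which is what the
   IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES() call models. */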



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
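
/* Layout note: the map holds four entries per opcode byte, one per
   mandatory-prefix column (none, 0x66, 0xf3, 0xf2), giving the 256 * 4 = 1024
   entries asserted above.  A dispatcher would index it along these lines
   (illustrative sketch with made-up local names; the prefix-index encoding of
   0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 is assumed from the column order):
   @code
        PFNIEMOP const pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
   @endcode
 */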

/** @} */
