VirtualBox
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@96860

Last change on this file since 96860 was 96852, checked in by vboxsync, 20 months ago:
IEM: Rotate the FPU stack when changing the FP TOS. Make sure stack adjustment is done before MMX instructions execute, not after.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96852 2022-09-26 06:06:05Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
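
/*
 * Illustrative sketch (not part of the original file): the decoder never
 * reaches a common worker directly.  Each opcode handler forwards to it
 * together with the assembly-level helper that does the actual operation.
 * The handler and helper names below are placeholders, not real symbols:
 *
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 * {
 *     IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 * }
 * @endcode
 */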


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
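
/*
 * Illustrative sketch (not part of the original file): 128-bit SSE2 integer
 * handlers pair the worker above with the matching u128 helper, exactly as
 * the MMX handlers do with their u64 helpers.  Placeholder names:
 *
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 * {
 *     IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx");
 *     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxxx_u128);
 * }
 * @endcode
 */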


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
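
/*
 * Illustrative note (not part of the original file): for this LowLow form
 * the memory operand is only 32 bits wide and is zero-extended by
 * IEM_MC_FETCH_MEM_U32_ZX_U64 above.  A punpcklbw-style helper would then
 * interleave the low dwords byte by byte, e.g. (assumed values):
 *
 * @code
 *     dst = 0x????????AABBCCDD, src = 0x????????11223344
 *  => dst = 0x11AA22BB33CC44DD
 * @endcode
 */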


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 or the full 128 bits may be
 * read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either 64 or the full 128 bits may be
 * read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel docs this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
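
/*
 * Illustrative sketch (not part of the original file): packed
 * single-precision handlers (addps-style) forward here like the integer
 * ones.  Note the ordering in the worker above: the result is written back
 * with IEM_MC_STORE_SSE_RESULT() before
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() checks for an unmasked
 * MXCSR exception.  Placeholder names:
 *
 * @code
 * FNIEMOP_DEF(iemOp_xxxps_Vps_Wps)
 * {
 *     IEMOP_MNEMONIC(xxxps, "xxxps Vps,Wps");
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_xxxps_u128);
 * }
 * @endcode
 */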


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5 (common worker for verr/verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
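
/*
 * Illustrative note (not part of the original file): the group 6 dispatch
 * above indexes the table with bits 5:3 (the reg field) of the ModR/M byte.
 * E.g. the byte sequence 0F 00 D8 decodes as mod=3/reg=3/rm=0, so
 * g_apfnGroup6[3] == iemOp_Grp6_ltr is invoked and emulates 'ltr ax'.
 */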


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1732
1733/**
1734 * Group 7 jump table, memory variant.
1735 */
1736IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1737{
1738 iemOp_Grp7_sgdt,
1739 iemOp_Grp7_sidt,
1740 iemOp_Grp7_lgdt,
1741 iemOp_Grp7_lidt,
1742 iemOp_Grp7_smsw,
1743 iemOp_InvalidWithRM,
1744 iemOp_Grp7_lmsw,
1745 iemOp_Grp7_invlpg
1746};
1747
1748
1749/** Opcode 0x0f 0x01. */
1750FNIEMOP_DEF(iemOp_Grp7)
1751{
1752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1753 if (IEM_IS_MODRM_MEM_MODE(bRm))
1754 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1755
1756 switch (IEM_GET_MODRM_REG_8(bRm))
1757 {
1758 case 0:
1759 switch (IEM_GET_MODRM_RM_8(bRm))
1760 {
1761 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1762 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1763 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1764 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1765 }
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767
1768 case 1:
1769 switch (IEM_GET_MODRM_RM_8(bRm))
1770 {
1771 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1772 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1773 }
1774 return IEMOP_RAISE_INVALID_OPCODE();
1775
1776 case 2:
1777 switch (IEM_GET_MODRM_RM_8(bRm))
1778 {
1779 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1780 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1781 }
1782 return IEMOP_RAISE_INVALID_OPCODE();
1783
1784 case 3:
1785 switch (IEM_GET_MODRM_RM_8(bRm))
1786 {
1787 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1788 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1789 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1790 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1791 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1792 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1793 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1794 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1796 }
1797
1798 case 4:
1799 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1800
1801 case 5:
1802 return IEMOP_RAISE_INVALID_OPCODE();
1803
1804 case 6:
1805 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1806
1807 case 7:
1808 switch (IEM_GET_MODRM_RM_8(bRm))
1809 {
1810 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1811 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1812 }
1813 return IEMOP_RAISE_INVALID_OPCODE();
1814
1815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1816 }
1817}
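
/*
 * Informational examples of how the dispatcher above resolves well-known
 * group 7 encodings, assuming the usual ModR/M decomposition
 * (mod = bRm >> 6, reg = (bRm >> 3) & 7, rm = bRm & 7):
 *      0F 01 C1 -> mod=3, reg=0, rm=1 -> VMCALL
 *      0F 01 D0 -> mod=3, reg=2, rm=0 -> XGETBV
 *      0F 01 F8 -> mod=3, reg=7, rm=0 -> SWAPGS
 *      0F 01 F9 -> mod=3, reg=7, rm=1 -> RDTSCP
 * Memory forms (mod != 3) go through g_apfnGroup7Mem on the reg field
 * alone, e.g. 0F 01 10 -> reg=2 -> LGDT m.
 */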
1818
1819/** Opcode 0x0f 0x00 /3. */
1820FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1821{
1822 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824
1825 if (IEM_IS_MODRM_REG_MODE(bRm))
1826 {
1827 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1828 switch (pVCpu->iem.s.enmEffOpSize)
1829 {
1830 case IEMMODE_16BIT:
1831 {
1832 IEM_MC_BEGIN(3, 0);
1833 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1834 IEM_MC_ARG(uint16_t, u16Sel, 1);
1835 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1836
1837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1838 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1839 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1840
1841 IEM_MC_END();
1842 return VINF_SUCCESS;
1843 }
1844
1845 case IEMMODE_32BIT:
1846 case IEMMODE_64BIT:
1847 {
1848 IEM_MC_BEGIN(3, 0);
1849 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1850 IEM_MC_ARG(uint16_t, u16Sel, 1);
1851 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1852
1853 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1854 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1855 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1856
1857 IEM_MC_END();
1858 return VINF_SUCCESS;
1859 }
1860
1861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1862 }
1863 }
1864 else
1865 {
1866 switch (pVCpu->iem.s.enmEffOpSize)
1867 {
1868 case IEMMODE_16BIT:
1869 {
1870 IEM_MC_BEGIN(3, 1);
1871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1872 IEM_MC_ARG(uint16_t, u16Sel, 1);
1873 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1875
1876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1877 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 case IEMMODE_32BIT:
1888 case IEMMODE_64BIT:
1889 {
1890 IEM_MC_BEGIN(3, 1);
1891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1892 IEM_MC_ARG(uint16_t, u16Sel, 1);
1893 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1895
1896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1897 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1898/** @todo testcase: make sure it's a 16-bit read. */
1899
1900 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1901 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1902 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1903
1904 IEM_MC_END();
1905 return VINF_SUCCESS;
1906 }
1907
1908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1909 }
1910 }
1911}
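
/*
 * Informational sketch: for 'lar eax, cx' the worker above takes the 32/64
 * bit path, fetches the selector from CX and leaves it to iemCImpl_LarLsl_u64
 * to load the descriptor's access-right bytes and set ZF on success; with
 * fIsLar=false (LSL) the segment limit is loaded instead.  The memory form
 * deliberately fetches only 16 bits, matching the Ew operand.
 */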
1912
1913
1914
1915/** Opcode 0x0f 0x02. */
1916FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1917{
1918 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1919 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1920}
1921
1922
1923/** Opcode 0x0f 0x03. */
1924FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1925{
1926 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1927 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1928}
1929
1930
1931/** Opcode 0x0f 0x05. */
1932FNIEMOP_DEF(iemOp_syscall)
1933{
1934 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1937}
1938
1939
1940/** Opcode 0x0f 0x06. */
1941FNIEMOP_DEF(iemOp_clts)
1942{
1943 IEMOP_MNEMONIC(clts, "clts");
1944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1945 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1946}
1947
1948
1949/** Opcode 0x0f 0x07. */
1950FNIEMOP_DEF(iemOp_sysret)
1951{
1952 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1954 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1955}
1956
1957
1958/** Opcode 0x0f 0x08. */
1959FNIEMOP_DEF(iemOp_invd)
1960{
1961 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1962 IEMOP_HLP_MIN_486();
1963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1964 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1965}
1966
1967
1968/** Opcode 0x0f 0x09. */
1969FNIEMOP_DEF(iemOp_wbinvd)
1970{
1971 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1972 IEMOP_HLP_MIN_486();
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1975}
1976
1977
1978/** Opcode 0x0f 0x0b. */
1979FNIEMOP_DEF(iemOp_ud2)
1980{
1981 IEMOP_MNEMONIC(ud2, "ud2");
1982 return IEMOP_RAISE_INVALID_OPCODE();
1983}
1984
1985/** Opcode 0x0f 0x0d. */
1986FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1987{
1988 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1989 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1990 {
1991 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1992 return IEMOP_RAISE_INVALID_OPCODE();
1993 }
1994
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1996 if (IEM_IS_MODRM_REG_MODE(bRm))
1997 {
1998 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1999 return IEMOP_RAISE_INVALID_OPCODE();
2000 }
2001
2002 switch (IEM_GET_MODRM_REG_8(bRm))
2003 {
2004 case 2: /* Aliased to /0 for the time being. */
2005 case 4: /* Aliased to /0 for the time being. */
2006 case 5: /* Aliased to /0 for the time being. */
2007 case 6: /* Aliased to /0 for the time being. */
2008 case 7: /* Aliased to /0 for the time being. */
2009 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2010 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2011 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2013 }
2014
2015 IEM_MC_BEGIN(0, 1);
2016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 /* Currently a NOP. */
2020 NOREF(GCPtrEffSrc);
2021 IEM_MC_ADVANCE_RIP();
2022 IEM_MC_END();
2023 return VINF_SUCCESS;
2024}
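
/*
 * Informational note: PREFETCH/PREFETCHW are pure hints, so the handler
 * above merely decodes the effective address and advances RIP; no memory
 * access is made and the address itself cannot fault.
 */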
2025
2026
2027/** Opcode 0x0f 0x0e. */
2028FNIEMOP_DEF(iemOp_femms)
2029{
2030 IEMOP_MNEMONIC(femms, "femms");
2031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032
2033 IEM_MC_BEGIN(0,0);
2034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2036 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2037 IEM_MC_FPU_FROM_MMX_MODE();
2038 IEM_MC_ADVANCE_RIP();
2039 IEM_MC_END();
2040 return VINF_SUCCESS;
2041}
2042
2043
2044/** Opcode 0x0f 0x0f. */
2045FNIEMOP_DEF(iemOp_3Dnow)
2046{
2047 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2048 {
2049 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2050 return IEMOP_RAISE_INVALID_OPCODE();
2051 }
2052
2053#ifdef IEM_WITH_3DNOW
2054 /* This is pretty sparse, use switch instead of table. */
2055 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2056 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2057#else
2058 IEMOP_BITCH_ABOUT_STUB();
2059 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2060#endif
2061}
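
/*
 * Informational note: 3DNow! instructions are encoded as 0F 0F /r ib with
 * the operation selected by the trailing immediate byte, e.g.
 * 'pfadd mm0, mm1' is 0F 0F C1 9E.  The subopcode space is sparse, which is
 * why iemOp_3DNowDispatcher uses a switch rather than a 256-entry table.
 */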
2062
2063
2064/**
2065 * @opcode 0x10
2066 * @oppfx none
2067 * @opcpuid sse
2068 * @opgroup og_sse_simdfp_datamove
2069 * @opxcpttype 4UA
2070 * @optest op1=1 op2=2 -> op1=2
2071 * @optest op1=0 op2=-22 -> op1=-22
2072 */
2073FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2074{
2075 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2077 if (IEM_IS_MODRM_REG_MODE(bRm))
2078 {
2079 /*
2080 * Register, register.
2081 */
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2086 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2087 IEM_GET_MODRM_RM(pVCpu, bRm));
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 else
2092 {
2093 /*
2094 * Memory, register.
2095 */
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2104
2105 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2106 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 return VINF_SUCCESS;
2112
2113}
2114
2115
2116/**
2117 * @opcode 0x10
2118 * @oppfx 0x66
2119 * @opcpuid sse2
2120 * @opgroup og_sse2_pcksclr_datamove
2121 * @opxcpttype 4UA
2122 * @optest op1=1 op2=2 -> op1=2
2123 * @optest op1=0 op2=-42 -> op1=-42
2124 */
2125FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2126{
2127 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2129 if (IEM_IS_MODRM_REG_MODE(bRm))
2130 {
2131 /*
2132 * Register, register.
2133 */
2134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2135 IEM_MC_BEGIN(0, 0);
2136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2138 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2139 IEM_GET_MODRM_RM(pVCpu, bRm));
2140 IEM_MC_ADVANCE_RIP();
2141 IEM_MC_END();
2142 }
2143 else
2144 {
2145 /*
2146 * Memory, register.
2147 */
2148 IEM_MC_BEGIN(0, 2);
2149 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2151
2152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2156
2157 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2158 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2159
2160 IEM_MC_ADVANCE_RIP();
2161 IEM_MC_END();
2162 }
2163 return VINF_SUCCESS;
2164}
2165
2166
2167/**
2168 * @opcode 0x10
2169 * @oppfx 0xf3
2170 * @opcpuid sse
2171 * @opgroup og_sse_simdfp_datamove
2172 * @opxcpttype 5
2173 * @optest op1=1 op2=2 -> op1=2
2174 * @optest op1=0 op2=-22 -> op1=-22
2175 */
2176FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2177{
2178 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 if (IEM_IS_MODRM_REG_MODE(bRm))
2181 {
2182 /*
2183 * Register, register.
2184 */
2185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2186 IEM_MC_BEGIN(0, 1);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2192 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP();
2195 IEM_MC_END();
2196 }
2197 else
2198 {
2199 /*
2200 * Memory, register.
2201 */
2202 IEM_MC_BEGIN(0, 2);
2203 IEM_MC_LOCAL(uint32_t, uSrc);
2204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2205
2206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2208 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2209 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2210
2211 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2212 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2213
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 return VINF_SUCCESS;
2218}
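
/*
 * Informational note: the asymmetry above is architectural.  For
 * 'movss xmm1, xmm2' only the low dword of the destination is replaced and
 * bits 127:32 survive, while 'movss xmm1, [mem32]' zero-extends through
 * bit 127; hence IEM_MC_STORE_XREG_U32 vs IEM_MC_STORE_XREG_U32_ZX_U128.
 * The movsd worker below mirrors this at qword granularity.
 */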
2219
2220
2221/**
2222 * @opcode 0x10
2223 * @oppfx 0xf2
2224 * @opcpuid sse2
2225 * @opgroup og_sse2_pcksclr_datamove
2226 * @opxcpttype 5
2227 * @optest op1=1 op2=2 -> op1=2
2228 * @optest op1=0 op2=-42 -> op1=-42
2229 */
2230FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2231{
2232 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 if (IEM_IS_MODRM_REG_MODE(bRm))
2235 {
2236 /*
2237 * Register, register.
2238 */
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_BEGIN(0, 1);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242
2243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2244 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2245 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP();
2249 IEM_MC_END();
2250 }
2251 else
2252 {
2253 /*
2254 * Memory, register.
2255 */
2256 IEM_MC_BEGIN(0, 2);
2257 IEM_MC_LOCAL(uint64_t, uSrc);
2258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2259
2260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2264
2265 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2266 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2267
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 return VINF_SUCCESS;
2272}
2273
2274
2275/**
2276 * @opcode 0x11
2277 * @oppfx none
2278 * @opcpuid sse
2279 * @opgroup og_sse_simdfp_datamove
2280 * @opxcpttype 4UA
2281 * @optest op1=1 op2=2 -> op1=2
2282 * @optest op1=0 op2=-42 -> op1=-42
2283 */
2284FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2285{
2286 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2288 if (IEM_IS_MODRM_REG_MODE(bRm))
2289 {
2290 /*
2291 * Register, register.
2292 */
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_BEGIN(0, 0);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2298 IEM_GET_MODRM_REG(pVCpu, bRm));
2299 IEM_MC_ADVANCE_RIP();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 /*
2305 * Memory, register.
2306 */
2307 IEM_MC_BEGIN(0, 2);
2308 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2310
2311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2315
2316 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2318
2319 IEM_MC_ADVANCE_RIP();
2320 IEM_MC_END();
2321 }
2322 return VINF_SUCCESS;
2323}
2324
2325
2326/**
2327 * @opcode 0x11
2328 * @oppfx 0x66
2329 * @opcpuid sse2
2330 * @opgroup og_sse2_pcksclr_datamove
2331 * @opxcpttype 4UA
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
2335FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2336{
2337 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2339 if (IEM_IS_MODRM_REG_MODE(bRm))
2340 {
2341 /*
2342 * Register, register.
2343 */
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2345 IEM_MC_BEGIN(0, 0);
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2349 IEM_GET_MODRM_REG(pVCpu, bRm));
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /*
2356 * Memory, register.
2357 */
2358 IEM_MC_BEGIN(0, 2);
2359 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2361
2362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2366
2367 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2368 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/**
2378 * @opcode 0x11
2379 * @oppfx 0xf3
2380 * @opcpuid sse
2381 * @opgroup og_sse_simdfp_datamove
2382 * @opxcpttype 5
2383 * @optest op1=1 op2=2 -> op1=2
2384 * @optest op1=0 op2=-22 -> op1=-22
2385 */
2386FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2387{
2388 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2390 if (IEM_IS_MODRM_REG_MODE(bRm))
2391 {
2392 /*
2393 * Register, register.
2394 */
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEM_MC_BEGIN(0, 1);
2397 IEM_MC_LOCAL(uint32_t, uSrc);
2398
2399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2401 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2402 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2403
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Memory, register.
2411 */
2412 IEM_MC_BEGIN(0, 2);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2420
2421 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x11
2433 * @oppfx 0xf2
2434 * @opcpuid sse2
2435 * @opgroup og_sse2_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=1 op2=2 -> op1=2
2438 * @optest op1=0 op2=-42 -> op1=-42
2439 */
2440FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2441{
2442 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(0, 1);
2451 IEM_MC_LOCAL(uint64_t, uSrc);
2452
2453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2455 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2456 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 else
2462 {
2463 /*
2464 * Memory, register.
2465 */
2466 IEM_MC_BEGIN(0, 2);
2467 IEM_MC_LOCAL(uint64_t, uSrc);
2468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2469
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2474
2475 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2476 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2477
2478 IEM_MC_ADVANCE_RIP();
2479 IEM_MC_END();
2480 }
2481 return VINF_SUCCESS;
2482}
2483
2484
2485FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2486{
2487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2488 if (IEM_IS_MODRM_REG_MODE(bRm))
2489 {
2490 /**
2491 * @opcode 0x12
2492 * @opcodesub 11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2501
2502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503 IEM_MC_BEGIN(0, 1);
2504 IEM_MC_LOCAL(uint64_t, uSrc);
2505
2506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2508 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2509 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2510
2511 IEM_MC_ADVANCE_RIP();
2512 IEM_MC_END();
2513 }
2514 else
2515 {
2516 /**
2517 * @opdone
2518 * @opcode 0x12
2519 * @opcodesub !11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2527 */
2528 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 2);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2533
2534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2538
2539 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2540 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 return VINF_SUCCESS;
2546}
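
/*
 * Informational examples: the mod field gives 0F 12 two personalities.
 * 0F 12 C1 decodes as 'movhlps xmm0, xmm1', copying the high qword of xmm1
 * into the low qword of xmm0, while 0F 12 00 (64-bit mode) decodes as
 * 'movlps xmm0, [rax]', loading the memory qword into the low half and
 * preserving the high half.
 */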
2547
2548
2549/**
2550 * @opcode 0x12
2551 * @opcodesub !11 mr/reg
2552 * @oppfx 0x66
2553 * @opcpuid sse2
2554 * @opgroup og_sse2_pcksclr_datamove
2555 * @opxcpttype 5
2556 * @optest op1=1 op2=2 -> op1=2
2557 * @optest op1=0 op2=-42 -> op1=-42
2558 */
2559FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2560{
2561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2562 if (IEM_IS_MODRM_MEM_MODE(bRm))
2563 {
2564 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2565
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(uint64_t, uSrc);
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 return VINF_SUCCESS;
2581 }
2582
2583 /**
2584 * @opdone
2585 * @opmnemonic ud660f12m3
2586 * @opcode 0x12
2587 * @opcodesub 11 mr/reg
2588 * @oppfx 0x66
2589 * @opunused immediate
2590 * @opcpuid sse
2591 * @optest ->
2592 */
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594}
2595
2596
2597/**
2598 * @opcode 0x12
2599 * @oppfx 0xf3
2600 * @opcpuid sse3
2601 * @opgroup og_sse3_pcksclr_datamove
2602 * @opxcpttype 4
2603 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2604 * op1=0x00000002000000020000000100000001
2605 */
2606FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2607{
2608 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2610 if (IEM_IS_MODRM_REG_MODE(bRm))
2611 {
2612 /*
2613 * Register, register.
2614 */
2615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616 IEM_MC_BEGIN(2, 0);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2619
2620 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2621 IEM_MC_PREPARE_SSE_USAGE();
2622
2623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2624 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2625 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2626
2627 IEM_MC_ADVANCE_RIP();
2628 IEM_MC_END();
2629 }
2630 else
2631 {
2632 /*
2633 * Register, memory.
2634 */
2635 IEM_MC_BEGIN(2, 2);
2636 IEM_MC_LOCAL(RTUINT128U, uSrc);
2637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2639 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2640
2641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2643 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2644 IEM_MC_PREPARE_SSE_USAGE();
2645
2646 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2647 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2648 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2649
2650 IEM_MC_ADVANCE_RIP();
2651 IEM_MC_END();
2652 }
2653 return VINF_SUCCESS;
2654}
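
/*
 * Informational note: movsldup duplicates dwords 0 and 2, so the @optest
 * source dddddddd'00000002'eeeeeeee'00000001 becomes
 * 00000002'00000002'00000001'00000001.  As a full 16-byte load, the memory
 * form must be aligned (exception type 4), hence
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE.
 */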
2655
2656
2657/**
2658 * @opcode 0x12
2659 * @oppfx 0xf2
2660 * @opcpuid sse3
2661 * @opgroup og_sse3_pcksclr_datamove
2662 * @opxcpttype 5
2663 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2664 * op1=0x22222222111111112222222211111111
2665 */
2666FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2667{
2668 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2670 if (IEM_IS_MODRM_REG_MODE(bRm))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(2, 0);
2677 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2678 IEM_MC_ARG(uint64_t, uSrc, 1);
2679
2680 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2684 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2685 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 else
2691 {
2692 /*
2693 * Register, memory.
2694 */
2695 IEM_MC_BEGIN(2, 2);
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2697 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2698 IEM_MC_ARG(uint64_t, uSrc, 1);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2703 IEM_MC_PREPARE_SSE_USAGE();
2704
2705 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2706 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2707 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2708
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 return VINF_SUCCESS;
2713}
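
/*
 * Informational note: movddup broadcasts the low qword into both halves of
 * the destination, which is why only a uint64_t source is fetched above and
 * why the 8-byte memory form carries no 16-byte alignment requirement
 * (exception type 5), unlike movsldup.
 */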
2714
2715
2716/**
2717 * @opcode 0x13
2718 * @opcodesub !11 mr/reg
2719 * @oppfx none
2720 * @opcpuid sse
2721 * @opgroup og_sse_simdfp_datamove
2722 * @opxcpttype 5
2723 * @optest op1=1 op2=2 -> op1=2
2724 * @optest op1=0 op2=-42 -> op1=-42
2725 */
2726FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2727{
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if (IEM_IS_MODRM_MEM_MODE(bRm))
2730 {
2731 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2732
2733 IEM_MC_BEGIN(0, 2);
2734 IEM_MC_LOCAL(uint64_t, uSrc);
2735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2736
2737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2739 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2740 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2741
2742 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2744
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 return VINF_SUCCESS;
2748 }
2749
2750 /**
2751 * @opdone
2752 * @opmnemonic ud0f13m3
2753 * @opcode 0x13
2754 * @opcodesub 11 mr/reg
2755 * @oppfx none
2756 * @opunused immediate
2757 * @opcpuid sse
2758 * @optest ->
2759 */
2760 return IEMOP_RAISE_INVALID_OPCODE();
2761}
2762
2763
2764/**
2765 * @opcode 0x13
2766 * @opcodesub !11 mr/reg
2767 * @oppfx 0x66
2768 * @opcpuid sse2
2769 * @opgroup og_sse2_pcksclr_datamove
2770 * @opxcpttype 5
2771 * @optest op1=1 op2=2 -> op1=2
2772 * @optest op1=0 op2=-42 -> op1=-42
2773 */
2774FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 if (IEM_IS_MODRM_MEM_MODE(bRm))
2778 {
2779 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2780 IEM_MC_BEGIN(0, 2);
2781 IEM_MC_LOCAL(uint64_t, uSrc);
2782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2783
2784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2788
2789 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2790 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2791
2792 IEM_MC_ADVANCE_RIP();
2793 IEM_MC_END();
2794 return VINF_SUCCESS;
2795 }
2796
2797 /**
2798 * @opdone
2799 * @opmnemonic ud660f13m3
2800 * @opcode 0x13
2801 * @opcodesub 11 mr/reg
2802 * @oppfx 0x66
2803 * @opunused immediate
2804 * @opcpuid sse
2805 * @optest ->
2806 */
2807 return IEMOP_RAISE_INVALID_OPCODE();
2808}
2809
2810
2811/**
2812 * @opmnemonic udf30f13
2813 * @opcode 0x13
2814 * @oppfx 0xf3
2815 * @opunused intel-modrm
2816 * @opcpuid sse
2817 * @optest ->
2818 * @opdone
2819 */
2820
2821/**
2822 * @opmnemonic udf20f13
2823 * @opcode 0x13
2824 * @oppfx 0xf2
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2832FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2833{
2834 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2836}
2837
2838
2839/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2840FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2841{
2842 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2843 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2844}
2845
2846
2847/**
2848 * @opdone
2849 * @opmnemonic udf30f14
2850 * @opcode 0x14
2851 * @oppfx 0xf3
2852 * @opunused intel-modrm
2853 * @opcpuid sse
2854 * @optest ->
2855 * @opdone
2856 */
2857
2858/**
2859 * @opmnemonic udf20f14
2860 * @opcode 0x14
2861 * @oppfx 0xf2
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2869FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2870{
2871 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2872 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2873}
2874
2875
2876/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2877FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2878{
2879 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2880 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2881}
2882
2883
2884/* Opcode 0xf3 0x0f 0x15 - invalid */
2885/* Opcode 0xf2 0x0f 0x15 - invalid */
2886
2887/**
2888 * @opdone
2889 * @opmnemonic udf30f15
2890 * @opcode 0x15
2891 * @oppfx 0xf3
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/**
2899 * @opmnemonic udf20f15
2900 * @opcode 0x15
2901 * @oppfx 0xf2
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
2908FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2909{
2910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2911 if (IEM_IS_MODRM_REG_MODE(bRm))
2912 {
2913 /**
2914 * @opcode 0x16
2915 * @opcodesub 11 mr/reg
2916 * @oppfx none
2917 * @opcpuid sse
2918 * @opgroup og_sse_simdfp_datamove
2919 * @opxcpttype 5
2920 * @optest op1=1 op2=2 -> op1=2
2921 * @optest op1=0 op2=-42 -> op1=-42
2922 */
2923 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2924
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEM_MC_BEGIN(0, 1);
2927 IEM_MC_LOCAL(uint64_t, uSrc);
2928
2929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2930 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2931 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2932 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2933
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /**
2940 * @opdone
2941 * @opcode 0x16
2942 * @opcodesub !11 mr/reg
2943 * @oppfx none
2944 * @opcpuid sse
2945 * @opgroup og_sse_simdfp_datamove
2946 * @opxcpttype 5
2947 * @optest op1=1 op2=2 -> op1=2
2948 * @optest op1=0 op2=-42 -> op1=-42
2949 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2950 */
2951 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2952
2953 IEM_MC_BEGIN(0, 2);
2954 IEM_MC_LOCAL(uint64_t, uSrc);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2960 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2961
2962 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2963 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2964
2965 IEM_MC_ADVANCE_RIP();
2966 IEM_MC_END();
2967 }
2968 return VINF_SUCCESS;
2969}
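
/*
 * Informational note: the mirror image of the 0F 12 pair above, e.g.
 * 0F 16 C1 is 'movlhps xmm0, xmm1', placing the low qword of xmm1 in the
 * high qword of xmm0, and the memory form (movhps) loads into the high
 * qword while the low qword is left untouched.
 */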
2970
2971
2972/**
2973 * @opcode 0x16
2974 * @opcodesub !11 mr/reg
2975 * @oppfx 0x66
2976 * @opcpuid sse2
2977 * @opgroup og_sse2_pcksclr_datamove
2978 * @opxcpttype 5
2979 * @optest op1=1 op2=2 -> op1=2
2980 * @optest op1=0 op2=-42 -> op1=-42
2981 */
2982FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if (IEM_IS_MODRM_MEM_MODE(bRm))
2986 {
2987 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2988 IEM_MC_BEGIN(0, 2);
2989 IEM_MC_LOCAL(uint64_t, uSrc);
2990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2991
2992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2996
2997 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2998 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2999
3000 IEM_MC_ADVANCE_RIP();
3001 IEM_MC_END();
3002 return VINF_SUCCESS;
3003 }
3004
3005 /**
3006 * @opdone
3007 * @opmnemonic ud660f16m3
3008 * @opcode 0x16
3009 * @opcodesub 11 mr/reg
3010 * @oppfx 0x66
3011 * @opunused immediate
3012 * @opcpuid sse
3013 * @optest ->
3014 */
3015 return IEMOP_RAISE_INVALID_OPCODE();
3016}
3017
3018
3019/**
3020 * @opcode 0x16
3021 * @oppfx 0xf3
3022 * @opcpuid sse3
3023 * @opgroup og_sse3_pcksclr_datamove
3024 * @opxcpttype 4
3025 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3026 * op1=0x00000002000000020000000100000001
3027 */
3028FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3029{
3030 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 if (IEM_IS_MODRM_REG_MODE(bRm))
3033 {
3034 /*
3035 * Register, register.
3036 */
3037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3038 IEM_MC_BEGIN(2, 0);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3041
3042 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3046 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3047 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3048
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 else
3053 {
3054 /*
3055 * Register, memory.
3056 */
3057 IEM_MC_BEGIN(2, 2);
3058 IEM_MC_LOCAL(RTUINT128U, uSrc);
3059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3060 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3061 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3062
3063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3066 IEM_MC_PREPARE_SSE_USAGE();
3067
3068 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3069 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3071
3072 IEM_MC_ADVANCE_RIP();
3073 IEM_MC_END();
3074 }
3075 return VINF_SUCCESS;
3076}
3077
3078/**
3079 * @opdone
3080 * @opmnemonic udf20f16
3081 * @opcode 0x16
3082 * @oppfx 0xf2
3083 * @opunused intel-modrm
3084 * @opcpuid sse
3085 * @optest ->
3086 * @opdone
3087 */
3088
3089
3090/**
3091 * @opcode 0x17
3092 * @opcodesub !11 mr/reg
3093 * @oppfx none
3094 * @opcpuid sse
3095 * @opgroup og_sse_simdfp_datamove
3096 * @opxcpttype 5
3097 * @optest op1=1 op2=2 -> op1=2
3098 * @optest op1=0 op2=-42 -> op1=-42
3099 */
3100FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3101{
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 if (IEM_IS_MODRM_MEM_MODE(bRm))
3104 {
3105 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3106
3107 IEM_MC_BEGIN(0, 2);
3108 IEM_MC_LOCAL(uint64_t, uSrc);
3109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3110
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3115
3116 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3118
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 return VINF_SUCCESS;
3122 }
3123
3124 /**
3125 * @opdone
3126 * @opmnemonic ud0f17m3
3127 * @opcode 0x17
3128 * @opcodesub 11 mr/reg
3129 * @oppfx none
3130 * @opunused immediate
3131 * @opcpuid sse
3132 * @optest ->
3133 */
3134 return IEMOP_RAISE_INVALID_OPCODE();
3135}
3136
3137
3138/**
3139 * @opcode 0x17
3140 * @opcodesub !11 mr/reg
3141 * @oppfx 0x66
3142 * @opcpuid sse2
3143 * @opgroup og_sse2_pcksclr_datamove
3144 * @opxcpttype 5
3145 * @optest op1=1 op2=2 -> op1=2
3146 * @optest op1=0 op2=-42 -> op1=-42
3147 */
3148FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (IEM_IS_MODRM_MEM_MODE(bRm))
3152 {
3153 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3154
3155 IEM_MC_BEGIN(0, 2);
3156 IEM_MC_LOCAL(uint64_t, uSrc);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3162 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3163
3164 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3165 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3166
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 return VINF_SUCCESS;
3170 }
3171
3172 /**
3173 * @opdone
3174 * @opmnemonic ud660f17m3
3175 * @opcode 0x17
3176 * @opcodesub 11 mr/reg
3177 * @oppfx 0x66
3178 * @opunused immediate
3179 * @opcpuid sse
3180 * @optest ->
3181 */
3182 return IEMOP_RAISE_INVALID_OPCODE();
3183}
3184
3185
3186/**
3187 * @opdone
3188 * @opmnemonic udf30f17
3189 * @opcode 0x17
3190 * @oppfx 0xf3
3191 * @opunused intel-modrm
3192 * @opcpuid sse
3193 * @optest ->
3194 * @opdone
3195 */
3196
3197/**
3198 * @opmnemonic udf20f17
3199 * @opcode 0x17
3200 * @oppfx 0xf2
3201 * @opunused intel-modrm
3202 * @opcpuid sse
3203 * @optest ->
3204 * @opdone
3205 */
3206
3207
3208/** Opcode 0x0f 0x18. */
3209FNIEMOP_DEF(iemOp_prefetch_Grp16)
3210{
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if (IEM_IS_MODRM_MEM_MODE(bRm))
3213 {
3214 switch (IEM_GET_MODRM_REG_8(bRm))
3215 {
3216 case 4: /* Aliased to /0 for the time being according to AMD. */
3217 case 5: /* Aliased to /0 for the time being according to AMD. */
3218 case 6: /* Aliased to /0 for the time being according to AMD. */
3219 case 7: /* Aliased to /0 for the time being according to AMD. */
3220 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3221 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3222 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3223 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3225 }
3226
3227 IEM_MC_BEGIN(0, 1);
3228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3231 /* Currently a NOP. */
3232 NOREF(GCPtrEffSrc);
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 return VINF_SUCCESS;
3236 }
3237
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239}
3240
3241
3242/** Opcode 0x0f 0x19..0x1f. */
3243FNIEMOP_DEF(iemOp_nop_Ev)
3244{
3245 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3247 if (IEM_IS_MODRM_REG_MODE(bRm))
3248 {
3249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3250 IEM_MC_BEGIN(0, 0);
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 }
3254 else
3255 {
3256 IEM_MC_BEGIN(0, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3260 /* Currently a NOP. */
3261 NOREF(GCPtrEffSrc);
3262 IEM_MC_ADVANCE_RIP();
3263 IEM_MC_END();
3264 }
3265 return VINF_SUCCESS;
3266}
3267
3268
3269/** Opcode 0x0f 0x20. */
3270FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3271{
3272 /* mod is ignored, as are operand size overrides. */
3273 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3274 IEMOP_HLP_MIN_386();
3275 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3276 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3277 else
3278 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3279
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3283 {
3284 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3285 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3286 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3287 iCrReg |= 8;
3288 }
3289 switch (iCrReg)
3290 {
3291 case 0: case 2: case 3: case 4: case 8:
3292 break;
3293 default:
3294 return IEMOP_RAISE_INVALID_OPCODE();
3295 }
3296 IEMOP_HLP_DONE_DECODING();
3297
3298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3299}
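
/*
 * Informational example: the LOCK handling above implements AMD's alternate
 * CR8 encoding for 32-bit code, where F0 0F 20 C0 ('lock mov eax, cr0')
 * reads CR8 into EAX.  On CPUs without the feature (fMovCr8In32Bit clear)
 * the prefix yields #UD, which takes precedence over any #GP.
 */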
3300
3301
3302/** Opcode 0x0f 0x21. */
3303FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3304{
3305 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3306 IEMOP_HLP_MIN_386();
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3310 return IEMOP_RAISE_INVALID_OPCODE();
3311 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3312 IEM_GET_MODRM_RM(pVCpu, bRm),
3313 IEM_GET_MODRM_REG_8(bRm));
3314}
3315
3316
3317/** Opcode 0x0f 0x22. */
3318FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3319{
3320 /* mod is ignored, as are operand size overrides. */
3321 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3322 IEMOP_HLP_MIN_386();
3323 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3324 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3325 else
3326 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3327
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3329 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3330 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3331 {
3332 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3333 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3334 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3335 iCrReg |= 8;
3336 }
3337 switch (iCrReg)
3338 {
3339 case 0: case 2: case 3: case 4: case 8:
3340 break;
3341 default:
3342 return IEMOP_RAISE_INVALID_OPCODE();
3343 }
3344 IEMOP_HLP_DONE_DECODING();
3345
3346 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x23. */
3351FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3352{
3353 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3358 return IEMOP_RAISE_INVALID_OPCODE();
3359 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3360 IEM_GET_MODRM_REG_8(bRm),
3361 IEM_GET_MODRM_RM(pVCpu, bRm));
3362}
3363
3364
3365/** Opcode 0x0f 0x24. */
3366FNIEMOP_DEF(iemOp_mov_Rd_Td)
3367{
3368 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3369 IEMOP_HLP_MIN_386();
3370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3372 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3373 return IEMOP_RAISE_INVALID_OPCODE();
3374 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3375 IEM_GET_MODRM_RM(pVCpu, bRm),
3376 IEM_GET_MODRM_REG_8(bRm));
3377}
3378
3379
3380/** Opcode 0x0f 0x26. */
3381FNIEMOP_DEF(iemOp_mov_Td_Rd)
3382{
3383 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3384 IEMOP_HLP_MIN_386();
3385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3388 return IEMOP_RAISE_INVALID_OPCODE();
3389 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3390 IEM_GET_MODRM_REG_8(bRm),
3391 IEM_GET_MODRM_RM(pVCpu, bRm));
3392}
3393
3394
3395/**
3396 * @opcode 0x28
3397 * @oppfx none
3398 * @opcpuid sse
3399 * @opgroup og_sse_simdfp_datamove
3400 * @opxcpttype 1
3401 * @optest op1=1 op2=2 -> op1=2
3402 * @optest op1=0 op2=-42 -> op1=-42
3403 */
3404FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3405{
3406 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 if (IEM_IS_MODRM_REG_MODE(bRm))
3409 {
3410 /*
3411 * Register, register.
3412 */
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 IEM_MC_BEGIN(0, 0);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3418 IEM_GET_MODRM_RM(pVCpu, bRm));
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 }
3422 else
3423 {
3424 /*
3425 * Register, memory.
3426 */
3427 IEM_MC_BEGIN(0, 2);
3428 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3430
3431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435
3436 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3437 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
3444
3445/**
3446 * @opcode 0x28
3447 * @oppfx 66
3448 * @opcpuid sse2
3449 * @opgroup og_sse2_pcksclr_datamove
3450 * @opxcpttype 1
3451 * @optest op1=1 op2=2 -> op1=2
3452 * @optest op1=0 op2=-42 -> op1=-42
3453 */
3454FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3455{
3456 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3458 if (IEM_IS_MODRM_REG_MODE(bRm))
3459 {
3460 /*
3461 * Register, register.
3462 */
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_BEGIN(0, 0);
3465 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3468 IEM_GET_MODRM_RM(pVCpu, bRm));
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(0, 2);
3478 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480
3481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3485
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3488
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 return VINF_SUCCESS;
3493}
3494
3495/* Opcode 0xf3 0x0f 0x28 - invalid */
3496/* Opcode 0xf2 0x0f 0x28 - invalid */
3497
3498/**
3499 * @opcode 0x29
3500 * @oppfx none
3501 * @opcpuid sse
3502 * @opgroup og_sse_simdfp_datamove
3503 * @opxcpttype 1
3504 * @optest op1=1 op2=2 -> op1=2
3505 * @optest op1=0 op2=-42 -> op1=-42
3506 */
3507FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3508{
3509 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3511 if (IEM_IS_MODRM_REG_MODE(bRm))
3512 {
3513 /*
3514 * Register, register.
3515 */
3516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3520 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3521 IEM_GET_MODRM_REG(pVCpu, bRm));
3522 IEM_MC_ADVANCE_RIP();
3523 IEM_MC_END();
3524 }
3525 else
3526 {
3527 /*
3528 * Memory, register.
3529 */
3530 IEM_MC_BEGIN(0, 2);
3531 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533
3534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3538
3539 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3541
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548/**
3549 * @opcode 0x29
3550 * @oppfx 66
3551 * @opcpuid sse2
3552 * @opgroup og_sse2_pcksclr_datamove
3553 * @opxcpttype 1
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
3557FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3558{
3559 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3561 if (IEM_IS_MODRM_REG_MODE(bRm))
3562 {
3563 /*
3564 * Register, register.
3565 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 0);
3568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3570 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3571 IEM_GET_MODRM_REG(pVCpu, bRm));
3572 IEM_MC_ADVANCE_RIP();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 /*
3578 * Memory, register.
3579 */
3580 IEM_MC_BEGIN(0, 2);
3581 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3583
3584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3588
3589 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3590 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3591
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 }
3595 return VINF_SUCCESS;
3596}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3604/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3605FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3606
3607
3608/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3609FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3610{
3611 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3612
3613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3615 {
3616 if (IEM_IS_MODRM_REG_MODE(bRm))
3617 {
3618 /* XMM, greg64 */
3619 IEM_MC_BEGIN(3, 4);
3620 IEM_MC_LOCAL(uint32_t, fMxcsr);
3621 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3623 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3624 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3625
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3628 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3629
3630 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3631 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3632 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3633 IEM_MC_IF_MXCSR_XCPT_PENDING()
3634 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3635 IEM_MC_ELSE()
3636 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3637 IEM_MC_ENDIF();
3638
3639 IEM_MC_ADVANCE_RIP();
3640 IEM_MC_END();
3641 }
3642 else
3643 {
3644 /* XMM, [mem64] */
3645 IEM_MC_BEGIN(3, 4);
3646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3647 IEM_MC_LOCAL(uint32_t, fMxcsr);
3648 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3649 IEM_MC_LOCAL(int64_t, i64Src);
3650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3651 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3652 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3653
3654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3657 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3658
3659 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3660 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3661 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3662 IEM_MC_IF_MXCSR_XCPT_PENDING()
3663 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3664 IEM_MC_ELSE()
3665 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3666 IEM_MC_ENDIF();
3667
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 }
3672 else
3673 {
3674 if (IEM_IS_MODRM_REG_MODE(bRm))
3675 {
3676 /* XMM, greg32 */
3677 IEM_MC_BEGIN(3, 4);
3678 IEM_MC_LOCAL(uint32_t, fMxcsr);
3679 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3681 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3682 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3683
3684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3685 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3686 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3687
3688 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3689 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3690 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3691 IEM_MC_IF_MXCSR_XCPT_PENDING()
3692 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3693 IEM_MC_ELSE()
3694 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3695 IEM_MC_ENDIF();
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 else
3701 {
3702 /* XMM, [mem32] */
3703 IEM_MC_BEGIN(3, 4);
3704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3705 IEM_MC_LOCAL(uint32_t, fMxcsr);
3706 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3707 IEM_MC_LOCAL(int32_t, i32Src);
3708 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3709 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3710 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3711
3712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3716
3717 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3718 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3719 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3720 IEM_MC_IF_MXCSR_XCPT_PENDING()
3721 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3722 IEM_MC_ELSE()
3723 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3724 IEM_MC_ENDIF();
3725
3726 IEM_MC_ADVANCE_RIP();
3727 IEM_MC_END();
3728 }
3729 }
3730 return VINF_SUCCESS;
3731}
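
/*
 * Informational examples: REX.W selects the integer source width, so
 * F3 0F 2A C8 is 'cvtsi2ss xmm1, eax' and F3 48 0F 2A C8 is
 * 'cvtsi2ss xmm1, rax'.  If the conversion leaves an unmasked exception
 * pending in MXCSR, the IEM_MC_IF_MXCSR_XCPT_PENDING branch raises #XF (or
 * #UD when CR4.OSXMMEXCPT is clear) and skips the result write; otherwise
 * the rounded single is stored to the destination XMM register.
 */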
3732
3733
3734/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3735FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3736{
3737 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3738
3739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3741 {
3742 if (IEM_IS_MODRM_REG_MODE(bRm))
3743 {
3744 /* XMM, greg64 */
3745 IEM_MC_BEGIN(3, 4);
3746 IEM_MC_LOCAL(uint32_t, fMxcsr);
3747 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3748 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3749 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3750 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3751
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3754 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3755
3756        IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3757 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3758 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3759 IEM_MC_IF_MXCSR_XCPT_PENDING()
3760 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3761 IEM_MC_ELSE()
3762            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3763 IEM_MC_ENDIF();
3764
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 else
3769 {
3770 /* XMM, [mem64] */
3771 IEM_MC_BEGIN(3, 4);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773 IEM_MC_LOCAL(uint32_t, fMxcsr);
3774 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3775 IEM_MC_LOCAL(int64_t, i64Src);
3776 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3777 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3778 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3779
3780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3782 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3783 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3784
3785 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3786 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3787 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3788 IEM_MC_IF_MXCSR_XCPT_PENDING()
3789 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3790 IEM_MC_ELSE()
3791            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3792 IEM_MC_ENDIF();
3793
3794 IEM_MC_ADVANCE_RIP();
3795 IEM_MC_END();
3796 }
3797 }
3798 else
3799 {
3800 if (IEM_IS_MODRM_REG_MODE(bRm))
3801 {
3802        /* XMM, greg32 */
3803 IEM_MC_BEGIN(3, 4);
3804 IEM_MC_LOCAL(uint32_t, fMxcsr);
3805 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3806 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3808 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3809
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3813
3814        IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3815 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3816 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3817 IEM_MC_IF_MXCSR_XCPT_PENDING()
3818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3819 IEM_MC_ELSE()
3820            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3821 IEM_MC_ENDIF();
3822
3823 IEM_MC_ADVANCE_RIP();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828        /* XMM, [mem32] */
3829 IEM_MC_BEGIN(3, 4);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3831 IEM_MC_LOCAL(uint32_t, fMxcsr);
3832 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3833 IEM_MC_LOCAL(int32_t, i32Src);
3834 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3835 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3836 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3837
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3841 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3842
3843 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3844 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3845 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3846 IEM_MC_IF_MXCSR_XCPT_PENDING()
3847 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3848 IEM_MC_ELSE()
3849            IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3850 IEM_MC_ENDIF();
3851
3852 IEM_MC_ADVANCE_RIP();
3853 IEM_MC_END();
3854 }
3855 }
3856 return VINF_SUCCESS;
3857}
3858
3859
3860/**
3861 * @opcode 0x2b
3862 * @opcodesub !11 mr/reg
3863 * @oppfx none
3864 * @opcpuid sse
3865 * @opgroup og_sse1_cachect
3866 * @opxcpttype 1
3867 * @optest op1=1 op2=2 -> op1=2
3868 * @optest op1=0 op2=-42 -> op1=-42
3869 */
3870FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3871{
3872 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3874 if (IEM_IS_MODRM_MEM_MODE(bRm))
3875 {
3876 /*
3877 * memory, register.
3878 */
3879 IEM_MC_BEGIN(0, 2);
3880 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3882
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3885 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3886 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3887
3888 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3889 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3890
3891 IEM_MC_ADVANCE_RIP();
3892 IEM_MC_END();
3893 }
3894 /* The register, register encoding is invalid. */
3895 else
3896 return IEMOP_RAISE_INVALID_OPCODE();
3897 return VINF_SUCCESS;
3898}
3899
3900/**
3901 * @opcode 0x2b
3902 * @opcodesub !11 mr/reg
3903 * @oppfx 0x66
3904 * @opcpuid sse2
3905 * @opgroup og_sse2_cachect
3906 * @opxcpttype 1
3907 * @optest op1=1 op2=2 -> op1=2
3908 * @optest op1=0 op2=-42 -> op1=-42
3909 */
3910FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3911{
3912 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3914 if (IEM_IS_MODRM_MEM_MODE(bRm))
3915 {
3916 /*
3917 * memory, register.
3918 */
3919 IEM_MC_BEGIN(0, 2);
3920 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3922
3923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3926 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3927
3928 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3929 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3930
3931 IEM_MC_ADVANCE_RIP();
3932 IEM_MC_END();
3933 }
3934 /* The register, register encoding is invalid. */
3935 else
3936 return IEMOP_RAISE_INVALID_OPCODE();
3937 return VINF_SUCCESS;
3938}
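
/* Note: the non-temporal hint of movntps/movntpd is a pure cache optimization
   with no architectural side effects, so emulating the two forms above as
   ordinary aligned 128-bit stores is exact. */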
3939/* Opcode 0xf3 0x0f 0x2b - invalid */
3940/* Opcode 0xf2 0x0f 0x2b - invalid */
3941
3942
3943/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3944FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3945/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3946FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3947
3948
3949/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3950FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3951{
3952    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3953
3954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3955 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3956 {
3957 if (IEM_IS_MODRM_REG_MODE(bRm))
3958 {
3959 /* greg64, XMM */
3960 IEM_MC_BEGIN(3, 4);
3961 IEM_MC_LOCAL(uint32_t, fMxcsr);
3962 IEM_MC_LOCAL(int64_t, i64Dst);
3963 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3964 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3965 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
3966
3967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3968 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3969 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3970
3971 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3972 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3973 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3974 IEM_MC_IF_MXCSR_XCPT_PENDING()
3975 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3976 IEM_MC_ELSE()
3977 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3978 IEM_MC_ENDIF();
3979
3980 IEM_MC_ADVANCE_RIP();
3981 IEM_MC_END();
3982 }
3983 else
3984 {
3985 /* greg64, [mem64] */
3986 IEM_MC_BEGIN(3, 4);
3987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3988 IEM_MC_LOCAL(uint32_t, fMxcsr);
3989 IEM_MC_LOCAL(int64_t, i64Dst);
3990 IEM_MC_LOCAL(uint32_t, u32Src);
3991 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3992 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3993 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
3994
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3998 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3999
4000 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4001 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4002 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4003 IEM_MC_IF_MXCSR_XCPT_PENDING()
4004 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4005 IEM_MC_ELSE()
4006 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4007 IEM_MC_ENDIF();
4008
4009 IEM_MC_ADVANCE_RIP();
4010 IEM_MC_END();
4011 }
4012 }
4013 else
4014 {
4015 if (IEM_IS_MODRM_REG_MODE(bRm))
4016 {
4017 /* greg, XMM */
4018 IEM_MC_BEGIN(3, 4);
4019 IEM_MC_LOCAL(uint32_t, fMxcsr);
4020 IEM_MC_LOCAL(int32_t, i32Dst);
4021 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4022 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4023 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4024
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4027 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4028
4029 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4030 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4031 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4032 IEM_MC_IF_MXCSR_XCPT_PENDING()
4033 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4034 IEM_MC_ELSE()
4035 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4036 IEM_MC_ENDIF();
4037
4038 IEM_MC_ADVANCE_RIP();
4039 IEM_MC_END();
4040 }
4041 else
4042 {
4043 /* greg, [mem] */
4044 IEM_MC_BEGIN(3, 4);
4045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4046 IEM_MC_LOCAL(uint32_t, fMxcsr);
4047 IEM_MC_LOCAL(int32_t, i32Dst);
4048 IEM_MC_LOCAL(uint32_t, u32Src);
4049 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4050 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4051 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4052
4053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4055 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4056 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4057
4058 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4059 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4060 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4061 IEM_MC_IF_MXCSR_XCPT_PENDING()
4062 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4063 IEM_MC_ELSE()
4064 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4065 IEM_MC_ENDIF();
4066
4067 IEM_MC_ADVANCE_RIP();
4068 IEM_MC_END();
4069 }
4070 }
4071 return VINF_SUCCESS;
4072}
4073
4074
4075/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4076FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4077{
4078 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4079
4080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4081 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4082 {
4083 if (IEM_IS_MODRM_REG_MODE(bRm))
4084 {
4085 /* greg64, XMM */
4086 IEM_MC_BEGIN(3, 4);
4087 IEM_MC_LOCAL(uint32_t, fMxcsr);
4088 IEM_MC_LOCAL(int64_t, i64Dst);
4089 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4090 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4091 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4092
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4095 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4096
4097 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4098 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4099 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4100 IEM_MC_IF_MXCSR_XCPT_PENDING()
4101 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4102 IEM_MC_ELSE()
4103 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4104 IEM_MC_ENDIF();
4105
4106 IEM_MC_ADVANCE_RIP();
4107 IEM_MC_END();
4108 }
4109 else
4110 {
4111 /* greg64, [mem64] */
4112 IEM_MC_BEGIN(3, 4);
4113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4114 IEM_MC_LOCAL(uint32_t, fMxcsr);
4115 IEM_MC_LOCAL(int64_t, i64Dst);
4116 IEM_MC_LOCAL(uint64_t, u64Src);
4117 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4118 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4119 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4120
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4124 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4125
4126 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4127 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4128 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4129 IEM_MC_IF_MXCSR_XCPT_PENDING()
4130 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4131 IEM_MC_ELSE()
4132 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4133 IEM_MC_ENDIF();
4134
4135 IEM_MC_ADVANCE_RIP();
4136 IEM_MC_END();
4137 }
4138 }
4139 else
4140 {
4141 if (IEM_IS_MODRM_REG_MODE(bRm))
4142 {
4143 /* greg, XMM */
4144 IEM_MC_BEGIN(3, 4);
4145 IEM_MC_LOCAL(uint32_t, fMxcsr);
4146 IEM_MC_LOCAL(int32_t, i32Dst);
4147 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4148 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4149 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4150
4151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4152 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4153 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4154
4155 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4156 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4157 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4158 IEM_MC_IF_MXCSR_XCPT_PENDING()
4159 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4160 IEM_MC_ELSE()
4161 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4162 IEM_MC_ENDIF();
4163
4164 IEM_MC_ADVANCE_RIP();
4165 IEM_MC_END();
4166 }
4167 else
4168 {
4169 /* greg, [mem] */
4170 IEM_MC_BEGIN(3, 4);
4171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4172 IEM_MC_LOCAL(uint32_t, fMxcsr);
4173 IEM_MC_LOCAL(int32_t, i32Dst);
4174 IEM_MC_LOCAL(uint64_t, u64Src);
4175 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4176 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4177 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4178
4179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4182 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4183
4184 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4185 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4186 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4187 IEM_MC_IF_MXCSR_XCPT_PENDING()
4188 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4189 IEM_MC_ELSE()
4190 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4191 IEM_MC_ENDIF();
4192
4193 IEM_MC_ADVANCE_RIP();
4194 IEM_MC_END();
4195 }
4196 }
4197 return VINF_SUCCESS;
4198}
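
/* Note: the 0x2c converts (cvtt*) always truncate toward zero regardless of
   MXCSR.RC, whereas the 0x2d converts below honour the current MXCSR rounding
   mode; that difference lives entirely in the assembly workers, the decoder
   bodies are identical. */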
4199
4200
4201/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4202FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
4203/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4204FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
4205
4206
4207/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4208FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4209{
4210    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4211
4212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4213 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4214 {
4215 if (IEM_IS_MODRM_REG_MODE(bRm))
4216 {
4217 /* greg64, XMM */
4218 IEM_MC_BEGIN(3, 4);
4219 IEM_MC_LOCAL(uint32_t, fMxcsr);
4220 IEM_MC_LOCAL(int64_t, i64Dst);
4221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4222 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4223 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4224
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4227 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4228
4229 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4230 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4231 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4232 IEM_MC_IF_MXCSR_XCPT_PENDING()
4233 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4234 IEM_MC_ELSE()
4235 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4236 IEM_MC_ENDIF();
4237
4238 IEM_MC_ADVANCE_RIP();
4239 IEM_MC_END();
4240 }
4241 else
4242 {
4243 /* greg64, [mem64] */
4244 IEM_MC_BEGIN(3, 4);
4245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4246 IEM_MC_LOCAL(uint32_t, fMxcsr);
4247 IEM_MC_LOCAL(int64_t, i64Dst);
4248 IEM_MC_LOCAL(uint32_t, u32Src);
4249 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4250 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4251 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4252
4253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4255 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4256 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4257
4258 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4259 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4260 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4261 IEM_MC_IF_MXCSR_XCPT_PENDING()
4262 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4263 IEM_MC_ELSE()
4264 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4265 IEM_MC_ENDIF();
4266
4267 IEM_MC_ADVANCE_RIP();
4268 IEM_MC_END();
4269 }
4270 }
4271 else
4272 {
4273 if (IEM_IS_MODRM_REG_MODE(bRm))
4274 {
4275 /* greg, XMM */
4276 IEM_MC_BEGIN(3, 4);
4277 IEM_MC_LOCAL(uint32_t, fMxcsr);
4278 IEM_MC_LOCAL(int32_t, i32Dst);
4279 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4280 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4281 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4282
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4285 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4286
4287 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4288 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4289 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4290 IEM_MC_IF_MXCSR_XCPT_PENDING()
4291 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4292 IEM_MC_ELSE()
4293 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4294 IEM_MC_ENDIF();
4295
4296 IEM_MC_ADVANCE_RIP();
4297 IEM_MC_END();
4298 }
4299 else
4300 {
4301 /* greg, [mem] */
4302 IEM_MC_BEGIN(3, 4);
4303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4304 IEM_MC_LOCAL(uint32_t, fMxcsr);
4305 IEM_MC_LOCAL(int32_t, i32Dst);
4306 IEM_MC_LOCAL(uint32_t, u32Src);
4307 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4308 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4309 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4310
4311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4314 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4315
4316 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4317 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4318 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4319 IEM_MC_IF_MXCSR_XCPT_PENDING()
4320 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4321 IEM_MC_ELSE()
4322 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4323 IEM_MC_ENDIF();
4324
4325 IEM_MC_ADVANCE_RIP();
4326 IEM_MC_END();
4327 }
4328 }
4329 return VINF_SUCCESS;
4330}
4331
4332
4333/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4334FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4335{
4336 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4337
4338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4340 {
4341 if (IEM_IS_MODRM_REG_MODE(bRm))
4342 {
4343 /* greg64, XMM */
4344 IEM_MC_BEGIN(3, 4);
4345 IEM_MC_LOCAL(uint32_t, fMxcsr);
4346 IEM_MC_LOCAL(int64_t, i64Dst);
4347 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4348 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4349 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4350
4351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4352 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4353 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4354
4355 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4356 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4357 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4358 IEM_MC_IF_MXCSR_XCPT_PENDING()
4359 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4360 IEM_MC_ELSE()
4361 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4362 IEM_MC_ENDIF();
4363
4364 IEM_MC_ADVANCE_RIP();
4365 IEM_MC_END();
4366 }
4367 else
4368 {
4369 /* greg64, [mem64] */
4370 IEM_MC_BEGIN(3, 4);
4371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4372 IEM_MC_LOCAL(uint32_t, fMxcsr);
4373 IEM_MC_LOCAL(int64_t, i64Dst);
4374 IEM_MC_LOCAL(uint64_t, u64Src);
4375 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4376 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4377 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4378
4379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4382 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4383
4384 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4385 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4386 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4387 IEM_MC_IF_MXCSR_XCPT_PENDING()
4388 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4389 IEM_MC_ELSE()
4390 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4391 IEM_MC_ENDIF();
4392
4393 IEM_MC_ADVANCE_RIP();
4394 IEM_MC_END();
4395 }
4396 }
4397 else
4398 {
4399 if (IEM_IS_MODRM_REG_MODE(bRm))
4400 {
4401 /* greg, XMM */
4402 IEM_MC_BEGIN(3, 4);
4403 IEM_MC_LOCAL(uint32_t, fMxcsr);
4404 IEM_MC_LOCAL(int32_t, i32Dst);
4405 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4406 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4407 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4408
4409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4410 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4411 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4412
4413 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4414 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4415 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4416 IEM_MC_IF_MXCSR_XCPT_PENDING()
4417 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4418 IEM_MC_ELSE()
4419 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4420 IEM_MC_ENDIF();
4421
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 }
4425 else
4426 {
4427 /* greg, [mem] */
4428 IEM_MC_BEGIN(3, 4);
4429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4430 IEM_MC_LOCAL(uint32_t, fMxcsr);
4431 IEM_MC_LOCAL(int32_t, i32Dst);
4432 IEM_MC_LOCAL(uint64_t, u64Src);
4433 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4434 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4435 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4436
4437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4439 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4440 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4441
4442 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4443 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4444 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4445 IEM_MC_IF_MXCSR_XCPT_PENDING()
4446 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4447 IEM_MC_ELSE()
4448 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4449 IEM_MC_ENDIF();
4450
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 }
4454 }
4455 return VINF_SUCCESS;
4456}
4457
4458
4459/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4460FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4461{
4462 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4464 if (IEM_IS_MODRM_REG_MODE(bRm))
4465 {
4466 /*
4467 * Register, register.
4468 */
4469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4470 IEM_MC_BEGIN(4, 1);
4471 IEM_MC_LOCAL(uint32_t, fEFlags);
4472 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4473 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4474 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4475 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4477 IEM_MC_PREPARE_SSE_USAGE();
4478 IEM_MC_FETCH_EFLAGS(fEFlags);
4479 IEM_MC_REF_MXCSR(pfMxcsr);
4480 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4481 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4483 IEM_MC_IF_MXCSR_XCPT_PENDING()
4484 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4485 IEM_MC_ELSE()
4486 IEM_MC_COMMIT_EFLAGS(fEFlags);
4487 IEM_MC_ENDIF();
4488
4489 IEM_MC_ADVANCE_RIP();
4490 IEM_MC_END();
4491 }
4492 else
4493 {
4494 /*
4495 * Register, memory.
4496 */
4497 IEM_MC_BEGIN(4, 3);
4498 IEM_MC_LOCAL(uint32_t, fEFlags);
4499 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4500 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4501 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4502 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4503 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4505
4506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4509 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4510
4511 IEM_MC_PREPARE_SSE_USAGE();
4512 IEM_MC_REF_MXCSR(pfMxcsr);
4513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4514 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4515 IEM_MC_IF_MXCSR_XCPT_PENDING()
4516 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4517 IEM_MC_ELSE()
4518 IEM_MC_COMMIT_EFLAGS(fEFlags);
4519 IEM_MC_ENDIF();
4520
4521 IEM_MC_ADVANCE_RIP();
4522 IEM_MC_END();
4523 }
4524 return VINF_SUCCESS;
4525}
4526
4527
4528/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4529FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4530{
4531 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4533 if (IEM_IS_MODRM_REG_MODE(bRm))
4534 {
4535 /*
4536 * Register, register.
4537 */
4538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4539 IEM_MC_BEGIN(4, 1);
4540 IEM_MC_LOCAL(uint32_t, fEFlags);
4541 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4542 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4543 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4544 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4546 IEM_MC_PREPARE_SSE_USAGE();
4547 IEM_MC_FETCH_EFLAGS(fEFlags);
4548 IEM_MC_REF_MXCSR(pfMxcsr);
4549 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4550 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4551 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4552 IEM_MC_IF_MXCSR_XCPT_PENDING()
4553 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4554 IEM_MC_ELSE()
4555 IEM_MC_COMMIT_EFLAGS(fEFlags);
4556 IEM_MC_ENDIF();
4557
4558 IEM_MC_ADVANCE_RIP();
4559 IEM_MC_END();
4560 }
4561 else
4562 {
4563 /*
4564 * Register, memory.
4565 */
4566 IEM_MC_BEGIN(4, 3);
4567 IEM_MC_LOCAL(uint32_t, fEFlags);
4568 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4569 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4570 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4571 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4572 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4574
4575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4578 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4579
4580 IEM_MC_PREPARE_SSE_USAGE();
4581 IEM_MC_REF_MXCSR(pfMxcsr);
4582 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4583 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4584 IEM_MC_IF_MXCSR_XCPT_PENDING()
4585 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4586 IEM_MC_ELSE()
4587 IEM_MC_COMMIT_EFLAGS(fEFlags);
4588 IEM_MC_ENDIF();
4589
4590 IEM_MC_ADVANCE_RIP();
4591 IEM_MC_END();
4592 }
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/* Opcode 0xf3 0x0f 0x2e - invalid */
4598/* Opcode 0xf2 0x0f 0x2e - invalid */
4599
4600
4601/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4602FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4603{
4604 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4606 if (IEM_IS_MODRM_REG_MODE(bRm))
4607 {
4608 /*
4609 * Register, register.
4610 */
4611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4612 IEM_MC_BEGIN(4, 1);
4613 IEM_MC_LOCAL(uint32_t, fEFlags);
4614 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4615 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4616 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4617 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4618 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4619 IEM_MC_PREPARE_SSE_USAGE();
4620 IEM_MC_FETCH_EFLAGS(fEFlags);
4621 IEM_MC_REF_MXCSR(pfMxcsr);
4622 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4623 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4624 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4625 IEM_MC_IF_MXCSR_XCPT_PENDING()
4626 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4627 IEM_MC_ELSE()
4628 IEM_MC_COMMIT_EFLAGS(fEFlags);
4629 IEM_MC_ENDIF();
4630
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /*
4637 * Register, memory.
4638 */
4639 IEM_MC_BEGIN(4, 3);
4640 IEM_MC_LOCAL(uint32_t, fEFlags);
4641 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4642 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4643 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4644 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4645 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4651 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4652
4653 IEM_MC_PREPARE_SSE_USAGE();
4654 IEM_MC_REF_MXCSR(pfMxcsr);
4655 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4656 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4657 IEM_MC_IF_MXCSR_XCPT_PENDING()
4658 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4659 IEM_MC_ELSE()
4660 IEM_MC_COMMIT_EFLAGS(fEFlags);
4661 IEM_MC_ENDIF();
4662
4663 IEM_MC_ADVANCE_RIP();
4664 IEM_MC_END();
4665 }
4666 return VINF_SUCCESS;
4667}
4668
4669
4670/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
4671FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4672{
4673 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4675 if (IEM_IS_MODRM_REG_MODE(bRm))
4676 {
4677 /*
4678 * Register, register.
4679 */
4680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4681 IEM_MC_BEGIN(4, 1);
4682 IEM_MC_LOCAL(uint32_t, fEFlags);
4683 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4684 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4685 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4686 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4687 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4688 IEM_MC_PREPARE_SSE_USAGE();
4689 IEM_MC_FETCH_EFLAGS(fEFlags);
4690 IEM_MC_REF_MXCSR(pfMxcsr);
4691 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4692 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4693 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4694 IEM_MC_IF_MXCSR_XCPT_PENDING()
4695 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4696 IEM_MC_ELSE()
4697 IEM_MC_COMMIT_EFLAGS(fEFlags);
4698 IEM_MC_ENDIF();
4699
4700 IEM_MC_ADVANCE_RIP();
4701 IEM_MC_END();
4702 }
4703 else
4704 {
4705 /*
4706 * Register, memory.
4707 */
4708 IEM_MC_BEGIN(4, 3);
4709 IEM_MC_LOCAL(uint32_t, fEFlags);
4710 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4711 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4712 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4713 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4714 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4716
4717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4720 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4721
4722 IEM_MC_PREPARE_SSE_USAGE();
4723 IEM_MC_REF_MXCSR(pfMxcsr);
4724 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4726 IEM_MC_IF_MXCSR_XCPT_PENDING()
4727 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4728 IEM_MC_ELSE()
4729 IEM_MC_COMMIT_EFLAGS(fEFlags);
4730 IEM_MC_ENDIF();
4731
4732 IEM_MC_ADVANCE_RIP();
4733 IEM_MC_END();
4734 }
4735 return VINF_SUCCESS;
4736}
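
/* The four (u)comis[sd] bodies above share one pattern: EFLAGS are fetched up
   front, the worker computes ZF/PF/CF from the compare and returns the MXCSR
   status flags, and the new EFLAGS are only committed when no unmasked SIMD
   FP exception is pending.  (comis* signals #I on QNaN operands as well,
   while ucomis* only does so for signalling NaNs.) */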
4737
4738
4739/* Opcode 0xf3 0x0f 0x2f - invalid */
4740/* Opcode 0xf2 0x0f 0x2f - invalid */
4741
4742/** Opcode 0x0f 0x30. */
4743FNIEMOP_DEF(iemOp_wrmsr)
4744{
4745 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4747 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
4748}
4749
4750
4751/** Opcode 0x0f 0x31. */
4752FNIEMOP_DEF(iemOp_rdtsc)
4753{
4754 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
4757}
4758
4759
4760/** Opcode 0x0f 0x32. */
4761FNIEMOP_DEF(iemOp_rdmsr)
4762{
4763 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4765 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
4766}
4767
4768
4769/** Opcode 0x0f 0x33. */
4770FNIEMOP_DEF(iemOp_rdpmc)
4771{
4772 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4774 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
4775}
4776
4777
4778/** Opcode 0x0f 0x34. */
4779FNIEMOP_DEF(iemOp_sysenter)
4780{
4781 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4783 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
4784}
4785
4786/** Opcode 0x0f 0x35. */
4787FNIEMOP_DEF(iemOp_sysexit)
4788{
4789 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4792}
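
/* These system instructions cannot be expressed as microcode templates, so
   each body only finishes decoding and then defers to the C implementation
   (iemCImpl_*) via IEM_MC_DEFER_TO_CIMPL_0/1. */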
4793
4794/** Opcode 0x0f 0x37. */
4795FNIEMOP_STUB(iemOp_getsec);
4796
4797
4798/** Opcode 0x0f 0x38. */
4799FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4800{
4801#ifdef IEM_WITH_THREE_0F_38
4802 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4803 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4804#else
4805 IEMOP_BITCH_ABOUT_STUB();
4806 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4807#endif
4808}
4809
4810
4811/** Opcode 0x0f 0x3a. */
4812FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4813{
4814#ifdef IEM_WITH_THREE_0F_3A
4815 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4816 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4817#else
4818 IEMOP_BITCH_ABOUT_STUB();
4819 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4820#endif
4821}
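
/* The three-byte tables are laid out with four entries per opcode byte, one
   per mandatory-prefix variant (none, 0x66, 0xf3, 0xf2); that is what the
   '* 4 + idxPrefix' indexing above selects. */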
4822
4823
4824/**
4825 * Implements a conditional move.
4826 *
4827 * Wish there was an obvious way to do this where we could share code and
4828 * reduce bloat.
4829 *
4830 * @param a_Cnd The conditional "microcode" operation.
4831 */
4832#define CMOV_X(a_Cnd) \
4833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4834 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4835 { \
4836 switch (pVCpu->iem.s.enmEffOpSize) \
4837 { \
4838 case IEMMODE_16BIT: \
4839 IEM_MC_BEGIN(0, 1); \
4840 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4841 a_Cnd { \
4842 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4843 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4844 } IEM_MC_ENDIF(); \
4845 IEM_MC_ADVANCE_RIP(); \
4846 IEM_MC_END(); \
4847 return VINF_SUCCESS; \
4848 \
4849 case IEMMODE_32BIT: \
4850 IEM_MC_BEGIN(0, 1); \
4851 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4852 a_Cnd { \
4853 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4854 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4855 } IEM_MC_ELSE() { \
4856 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4857 } IEM_MC_ENDIF(); \
4858 IEM_MC_ADVANCE_RIP(); \
4859 IEM_MC_END(); \
4860 return VINF_SUCCESS; \
4861 \
4862 case IEMMODE_64BIT: \
4863 IEM_MC_BEGIN(0, 1); \
4864 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4865 a_Cnd { \
4866 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4867 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4868 } IEM_MC_ENDIF(); \
4869 IEM_MC_ADVANCE_RIP(); \
4870 IEM_MC_END(); \
4871 return VINF_SUCCESS; \
4872 \
4873 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4874 } \
4875 } \
4876 else \
4877 { \
4878 switch (pVCpu->iem.s.enmEffOpSize) \
4879 { \
4880 case IEMMODE_16BIT: \
4881 IEM_MC_BEGIN(0, 2); \
4882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4883 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4885 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4886 a_Cnd { \
4887 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4888 } IEM_MC_ENDIF(); \
4889 IEM_MC_ADVANCE_RIP(); \
4890 IEM_MC_END(); \
4891 return VINF_SUCCESS; \
4892 \
4893 case IEMMODE_32BIT: \
4894 IEM_MC_BEGIN(0, 2); \
4895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4896 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4898 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4899 a_Cnd { \
4900 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4901 } IEM_MC_ELSE() { \
4902 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4903 } IEM_MC_ENDIF(); \
4904 IEM_MC_ADVANCE_RIP(); \
4905 IEM_MC_END(); \
4906 return VINF_SUCCESS; \
4907 \
4908 case IEMMODE_64BIT: \
4909 IEM_MC_BEGIN(0, 2); \
4910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4911 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4913 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4914 a_Cnd { \
4915 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4916 } IEM_MC_ENDIF(); \
4917 IEM_MC_ADVANCE_RIP(); \
4918 IEM_MC_END(); \
4919 return VINF_SUCCESS; \
4920 \
4921 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4922 } \
4923 } do {} while (0)
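
/* Example: CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)) yields the cmovo body:
   Ev is always fetched (register or memory) and copied to Gv only when OF is
   set.  The 32-bit cases keep an explicit ELSE clearing the high half of the
   destination, since a 32-bit GPR write zero-extends in 64-bit mode even
   when the move itself is suppressed. */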
4924
4925
4926
4927/** Opcode 0x0f 0x40. */
4928FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4929{
4930 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4931 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4932}
4933
4934
4935/** Opcode 0x0f 0x41. */
4936FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4937{
4938 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4939 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4940}
4941
4942
4943/** Opcode 0x0f 0x42. */
4944FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4945{
4946 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4947 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4948}
4949
4950
4951/** Opcode 0x0f 0x43. */
4952FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
4953{
4954 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
4955 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
4956}
4957
4958
4959/** Opcode 0x0f 0x44. */
4960FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
4961{
4962 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
4963 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
4964}
4965
4966
4967/** Opcode 0x0f 0x45. */
4968FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
4969{
4970 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
4971 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
4972}
4973
4974
4975/** Opcode 0x0f 0x46. */
4976FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
4977{
4978 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
4979 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4980}
4981
4982
4983/** Opcode 0x0f 0x47. */
4984FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
4985{
4986 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
4987 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4988}
4989
4990
4991/** Opcode 0x0f 0x48. */
4992FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
4993{
4994 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
4995 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
4996}
4997
4998
4999/** Opcode 0x0f 0x49. */
5000FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5001{
5002 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5003 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5004}
5005
5006
5007/** Opcode 0x0f 0x4a. */
5008FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5009{
5010 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5011 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5012}
5013
5014
5015/** Opcode 0x0f 0x4b. */
5016FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5017{
5018 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5019 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5020}
5021
5022
5023/** Opcode 0x0f 0x4c. */
5024FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5025{
5026 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5027 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5028}
5029
5030
5031/** Opcode 0x0f 0x4d. */
5032FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5033{
5034 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5035 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5036}
5037
5038
5039/** Opcode 0x0f 0x4e. */
5040FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5041{
5042 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5043 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5044}
5045
5046
5047/** Opcode 0x0f 0x4f. */
5048FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5049{
5050 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5051 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5052}
5053
5054#undef CMOV_X
5055
5056/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5057FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5058{
5059 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5061 if (IEM_IS_MODRM_REG_MODE(bRm))
5062 {
5063 /*
5064 * Register, register.
5065 */
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067 IEM_MC_BEGIN(2, 1);
5068 IEM_MC_LOCAL(uint8_t, u8Dst);
5069 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5070 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5071 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5072 IEM_MC_PREPARE_SSE_USAGE();
5073 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5074 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5075 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5076 IEM_MC_ADVANCE_RIP();
5077 IEM_MC_END();
5078 return VINF_SUCCESS;
5079 }
5080
5081 /* No memory operand. */
5082 return IEMOP_RAISE_INVALID_OPCODE();
5083}
5084
5085
5086/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5087FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5088{
5089 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5091 if (IEM_IS_MODRM_REG_MODE(bRm))
5092 {
5093 /*
5094 * Register, register.
5095 */
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097 IEM_MC_BEGIN(2, 1);
5098 IEM_MC_LOCAL(uint8_t, u8Dst);
5099 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5100 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5101 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5102 IEM_MC_PREPARE_SSE_USAGE();
5103 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5104 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5105 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5106 IEM_MC_ADVANCE_RIP();
5107 IEM_MC_END();
5108 return VINF_SUCCESS;
5109 }
5110
5111 /* No memory operand. */
5112 return IEMOP_RAISE_INVALID_OPCODE();
5113
5114}
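
/* movmskps/movmskpd gather the sign bits of the packed elements into the low
   bits of the destination GPR (four bits resp. two); u8Dst is wide enough for
   both and the IEM_MC_STORE_GREG_U32 above zero-extends the rest. */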
5115
5116
5117/* Opcode 0xf3 0x0f 0x50 - invalid */
5118/* Opcode 0xf2 0x0f 0x50 - invalid */
5119
5120
5121/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5122FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5123{
5124 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5125 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5126}
5127
5128
5129/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5130FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5131{
5132 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5133 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5134}
5135
5136
5137/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5138FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5139{
5140 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5141 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5142}
5143
5144
5145/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5146FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5147{
5148 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5149 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5150}
5151
5152
5153/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5154FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5155/* Opcode 0x66 0x0f 0x52 - invalid */
5156/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5157FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5158/* Opcode 0xf2 0x0f 0x52 - invalid */
5159
5160/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5161FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5162/* Opcode 0x66 0x0f 0x53 - invalid */
5163/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5164FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5165/* Opcode 0xf2 0x0f 0x53 - invalid */
5166
5167
5168/** Opcode 0x0f 0x54 - andps Vps, Wps */
5169FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5170{
5171 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5172 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5173}
5174
5175
5176/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5177FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5178{
5179 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5180 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5181}
5182
5183
5184/* Opcode 0xf3 0x0f 0x54 - invalid */
5185/* Opcode 0xf2 0x0f 0x54 - invalid */
5186
5187
5188/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5189FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5190{
5191 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5192 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5193}
5194
5195
5196/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5197FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5198{
5199 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5200 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5201}
5202
5203
5204/* Opcode 0xf3 0x0f 0x55 - invalid */
5205/* Opcode 0xf2 0x0f 0x55 - invalid */
5206
5207
5208/** Opcode 0x0f 0x56 - orps Vps, Wps */
5209FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5210{
5211 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5212 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5213}
5214
5215
5216/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5217FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5218{
5219 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5221}
5222
5223
5224/* Opcode 0xf3 0x0f 0x56 - invalid */
5225/* Opcode 0xf2 0x0f 0x56 - invalid */
5226
5227
5228/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5229FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5230{
5231 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5232 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5233}
5234
5235
5236/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5237FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5238{
5239 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5240 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5241}
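
/* The packed bitwise ops 0x54..0x57 reuse the integer pand/pandn/por/pxor
   workers: bitwise operations are type agnostic, so the ps and pd forms only
   differ in decoding, not in what is computed. */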
5242
5243
5244/* Opcode 0xf3 0x0f 0x57 - invalid */
5245/* Opcode 0xf2 0x0f 0x57 - invalid */
5246
5247/** Opcode 0x0f 0x58 - addps Vps, Wps */
5248FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5249{
5250 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5251 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5252}
5253
5254
5255/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5256FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5257{
5258 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5259 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5260}
5261
5262
5263/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5264FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5265{
5266 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5267 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5268}
5269
5270
5271/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5272FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5273{
5274 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5275 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5276}
5277
5278
5279/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5280FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5281{
5282 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5283 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5284}
5285
5286
5287/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5288FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5289{
5290 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5291 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5292}
5293
5294
5295/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5296FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5297{
5298 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5299 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5300}
5301
5302
5303/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5304FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5305{
5306 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5307 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5308}
5309
5310
5311/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5312FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5313{
5314 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5315 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5316}
5317
5318
5319/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5320FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5321{
5322 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5323 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5324}
5325
5326
5327/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5328FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5329{
5330 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5331 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5332}
5333
5334
5335/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5336FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5337{
5338 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5339 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5340}
5341
5342
5343/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5344FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5345{
5346 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5347 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5348}
5349
5350
5351/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5352FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5353{
5354 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5355 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5356}
5357
5358
5359/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5360FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5361{
5362 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5363 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5364}
5365
5366
5367/* Opcode 0xf2 0x0f 0x5b - invalid */
5368
5369
5370/** Opcode 0x0f 0x5c - subps Vps, Wps */
5371FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5372{
5373 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5374 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5375}
5376
5377
5378/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5379FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5380{
5381 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5382 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5383}
5384
5385
5386/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5387FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5388{
5389 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5390 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5391}
5392
5393
5394/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5395FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5396{
5397 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5398 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5399}
5400
5401
5402/** Opcode 0x0f 0x5d - minps Vps, Wps */
5403FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5404{
5405 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5406 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5407}
5408
5409
5410/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5411FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5412{
5413 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5414 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5415}
5416
5417
5418/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5419FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5420{
5421 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5422 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5423}
5424
5425
5426/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5427FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5428{
5429 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5430 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5431}
5432
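/**
 * Worth noting for the min/max family above: the SSE semantics are not
 * commutative.  If either input is a NaN, or the inputs are zeroes of
 * opposite sign, the source operand is returned.  A single C comparison
 * models this exactly (sketch, ignoring DAZ and exception flags):
 * @code
 *  static float minss_lane_model(float r32Dst, float r32Src)
 *  {
 *      // The compare is false for NaNs and for -0.0 < +0.0, so those
 *      // cases fall through to the source operand, just like MINSS.
 *      return r32Dst < r32Src ? r32Dst : r32Src;
 *  }
 * @endcode
 */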
5433
5434/** Opcode 0x0f 0x5e - divps Vps, Wps */
5435FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5436{
5437 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5438 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5439}
5440
5441
5442/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5443FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5444{
5445 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5446 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5447}
5448
5449
5450/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5451FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5452{
5453 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5454 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5455}
5456
5457
5458/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5459FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5460{
5461 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5462 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5463}
5464
5465
5466/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5467FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5468{
5469 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5470 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5471}
5472
5473
5474/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5475FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5476{
5477 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5478 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5479}
5480
5481
5482/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5483FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5484{
5485 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5486 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5487}
5488
5489
5490/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5491FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5492{
5493 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5494 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5495}
5496
5497
5498/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5499FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5500{
5501 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5502 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5503}
5504
5505
5506/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5507FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5508{
5509 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5510 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5511}
5512
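/**
 * What the low-unpack instructions above compute: the low halves of the
 * destination and source are interleaved element by element, destination
 * elements landing in the even lanes.  A sketch of the 64-bit byte form,
 * assuming a little-endian lane view:
 * @code
 *  #include <stdint.h>
 *  #include <string.h>
 *
 *  static void punpcklbw_u64_model(uint64_t *puDst, uint64_t const *puSrc)
 *  {
 *      uint8_t const *pbDst = (uint8_t const *)puDst;
 *      uint8_t const *pbSrc = (uint8_t const *)puSrc;
 *      uint8_t        abRes[8];
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          abRes[i * 2]     = pbDst[i];  // even lanes from the destination
 *          abRes[i * 2 + 1] = pbSrc[i];  // odd lanes from the source
 *      }
 *      memcpy(puDst, abRes, sizeof(abRes));
 *  }
 * @endcode
 */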
5513
5514/* Opcode 0xf3 0x0f 0x60 - invalid */
5515
5516
5517/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5518FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5519{
5520 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
5521 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5522 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5523}
5524
5525
5526/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5527FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5528{
5529 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5531}
5532
5533
5534/* Opcode 0xf3 0x0f 0x61 - invalid */
5535
5536
5537/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5538FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5539{
5540 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5541 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5542}
5543
5544
5545/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5546FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5547{
5548 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5549 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5550}
5551
5552
5553/* Opcode 0xf3 0x0f 0x62 - invalid */
5554
5555
5556
5557/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5558FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5559{
5560 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5562}
5563
5564
5565/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5566FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5567{
5568 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5570}
5571
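/**
 * The pack instructions above narrow signed words to signed bytes with
 * saturation; in the 64-bit form the low four result bytes come from the
 * destination words and the high four from the source words.  The per-lane
 * saturation, sketched:
 * @code
 *  #include <stdint.h>
 *
 *  static int8_t packsswb_lane_model(int16_t i16)
 *  {
 *      if (i16 > INT8_MAX) return INT8_MAX;   // 127 on positive overflow
 *      if (i16 < INT8_MIN) return INT8_MIN;   // -128 on negative overflow
 *      return (int8_t)i16;
 *  }
 * @endcode
 */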
5572
5573/* Opcode 0xf3 0x0f 0x63 - invalid */
5574
5575
5576/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5577FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5578{
5579 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5580 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5581}
5582
5583
5584/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5585FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5586{
5587 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5588 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5589}
5590
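/**
 * The packed compares above do not set EFLAGS; each lane is replaced by an
 * all-ones or all-zero mask, which guest code typically feeds into
 * PAND/PANDN to build branchless selects.  Per-lane sketch (note the
 * signed comparison):
 * @code
 *  #include <stdint.h>
 *
 *  static uint8_t pcmpgtb_lane_model(int8_t i8Dst, int8_t i8Src)
 *  {
 *      return i8Dst > i8Src ? UINT8_C(0xff) : UINT8_C(0x00);
 *  }
 * @endcode
 */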
5591
5592/* Opcode 0xf3 0x0f 0x64 - invalid */
5593
5594
5595/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5596FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5597{
5598 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5599 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5600}
5601
5602
5603/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5604FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5605{
5606 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5607 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5608}
5609
5610
5611/* Opcode 0xf3 0x0f 0x65 - invalid */
5612
5613
5614/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5615FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5616{
5617 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5618 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5619}
5620
5621
5622/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5623FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5624{
5625 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5626 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5627}
5628
5629
5630/* Opcode 0xf3 0x0f 0x66 - invalid */
5631
5632
5633/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5634FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5635{
5636 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5642FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5643{
5644 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5646}
5647
5648
5649/* Opcode 0xf3 0x0f 0x67 - invalid */
5650
5651
5652/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5653 * @note Intel and AMD both use Qd for the second parameter, yet they
5654 * both list it as an mmX/mem64 operand and Intel describes it as being
5655 * loaded as a qword, so it should really be Qq. */
5656FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5657{
5658 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5659 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5660}
5661
5662
5663/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5664FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5665{
5666 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5667 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5668}
5669
5670
5671/* Opcode 0xf3 0x0f 0x68 - invalid */
5672
5673
5674/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5675 * @note Intel and AMD both use Qd for the second parameter, yet they
5676 * both list it as an mmX/mem64 operand and Intel describes it as being
5677 * loaded as a qword, so it should really be Qq. */
5678FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5679{
5680 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5681 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5682}
5683
5684
5685/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5686FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5687{
5688 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5689 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5690
5691}
5692
5693
5694/* Opcode 0xf3 0x0f 0x69 - invalid */
5695
5696
5697/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5698 * @note Intel and AMD both use Qd for the second parameter, yet they
5699 * both list it as an mmX/mem64 operand and Intel describes it as being
5700 * loaded as a qword, so it should really be Qq. */
5701FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5702{
5703 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5705}
5706
5707
5708/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5709FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5710{
5711 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5713}
5714
5715
5716/* Opcode 0xf3 0x0f 0x6a - invalid */
5717
5718
5719/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5720FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5721{
5722 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5723 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5724}
5725
5726
5727/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5728FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5729{
5730 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5731 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5732}
5733
5734
5735/* Opcode 0xf3 0x0f 0x6b - invalid */
5736
5737
5738/* Opcode 0x0f 0x6c - invalid */
5739
5740
5741/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5742FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5743{
5744 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5745 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5746}
5747
5748
5749/* Opcode 0xf3 0x0f 0x6c - invalid */
5750/* Opcode 0xf2 0x0f 0x6c - invalid */
5751
5752
5753/* Opcode 0x0f 0x6d - invalid */
5754
5755
5756/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5757FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5758{
5759 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5761}
5762
5763
5764/* Opcode 0xf3 0x0f 0x6d - invalid */
5765
5766
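/** Opcode 0x0f 0x6e - movd/movq Pd, Ey */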
5767FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5768{
5769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5770 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5771 {
5772 /**
5773 * @opcode 0x6e
5774 * @opcodesub rex.w=1
5775 * @oppfx none
5776 * @opcpuid mmx
5777 * @opgroup og_mmx_datamove
5778 * @opxcpttype 5
5779 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5780 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5781 */
5782 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5783 if (IEM_IS_MODRM_REG_MODE(bRm))
5784 {
5785 /* MMX, greg64 */
5786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5787 IEM_MC_BEGIN(0, 1);
5788 IEM_MC_LOCAL(uint64_t, u64Tmp);
5789
5790 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5791 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5792 IEM_MC_FPU_TO_MMX_MODE();
5793
5794 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5795 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5796
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 }
5800 else
5801 {
5802 /* MMX, [mem64] */
5803 IEM_MC_BEGIN(0, 2);
5804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5805 IEM_MC_LOCAL(uint64_t, u64Tmp);
5806
5807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5809 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5810 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5811 IEM_MC_FPU_TO_MMX_MODE();
5812
5813 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5814 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5815
5816 IEM_MC_ADVANCE_RIP();
5817 IEM_MC_END();
5818 }
5819 }
5820 else
5821 {
5822 /**
5823 * @opdone
5824 * @opcode 0x6e
5825 * @opcodesub rex.w=0
5826 * @oppfx none
5827 * @opcpuid mmx
5828 * @opgroup og_mmx_datamove
5829 * @opxcpttype 5
5830 * @opfunction iemOp_movd_q_Pd_Ey
5831 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5832 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5833 */
5834 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5835 if (IEM_IS_MODRM_REG_MODE(bRm))
5836 {
5837 /* MMX, greg */
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 IEM_MC_BEGIN(0, 1);
5840 IEM_MC_LOCAL(uint64_t, u64Tmp);
5841
5842 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5843 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5844 IEM_MC_FPU_TO_MMX_MODE();
5845
5846 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5847 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5848
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 }
5852 else
5853 {
5854 /* MMX, [mem] */
5855 IEM_MC_BEGIN(0, 2);
5856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5857 IEM_MC_LOCAL(uint32_t, u32Tmp);
5858
5859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5861 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5862 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5863 IEM_MC_FPU_TO_MMX_MODE();
5864
5865 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5866 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5867
5868 IEM_MC_ADVANCE_RIP();
5869 IEM_MC_END();
5870 }
5871 }
5872 return VINF_SUCCESS;
5873}
5874
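/** Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey */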
5875FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
5876{
5877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5878 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5879 {
5880 /**
5881 * @opcode 0x6e
5882 * @opcodesub rex.w=1
5883 * @oppfx 0x66
5884 * @opcpuid sse2
5885 * @opgroup og_sse2_simdint_datamove
5886 * @opxcpttype 5
5887 * @optest 64-bit / op1=1 op2=2 -> op1=2
5888 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5889 */
5890 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5891 if (IEM_IS_MODRM_REG_MODE(bRm))
5892 {
5893 /* XMM, greg64 */
5894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5895 IEM_MC_BEGIN(0, 1);
5896 IEM_MC_LOCAL(uint64_t, u64Tmp);
5897
5898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5899 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5900
5901 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5902 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5903
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 }
5907 else
5908 {
5909 /* XMM, [mem64] */
5910 IEM_MC_BEGIN(0, 2);
5911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5912 IEM_MC_LOCAL(uint64_t, u64Tmp);
5913
5914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5916 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5917 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5918
5919 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5920 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5921
5922 IEM_MC_ADVANCE_RIP();
5923 IEM_MC_END();
5924 }
5925 }
5926 else
5927 {
5928 /**
5929 * @opdone
5930 * @opcode 0x6e
5931 * @opcodesub rex.w=0
5932 * @oppfx 0x66
5933 * @opcpuid sse2
5934 * @opgroup og_sse2_simdint_datamove
5935 * @opxcpttype 5
5936 * @opfunction iemOp_movd_q_Vy_Ey
5937 * @optest op1=1 op2=2 -> op1=2
5938 * @optest op1=0 op2=-42 -> op1=-42
5939 */
5940 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5941 if (IEM_IS_MODRM_REG_MODE(bRm))
5942 {
5943 /* XMM, greg32 */
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_BEGIN(0, 1);
5946 IEM_MC_LOCAL(uint32_t, u32Tmp);
5947
5948 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5949 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5950
5951 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5952 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5953
5954 IEM_MC_ADVANCE_RIP();
5955 IEM_MC_END();
5956 }
5957 else
5958 {
5959 /* XMM, [mem32] */
5960 IEM_MC_BEGIN(0, 2);
5961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5962 IEM_MC_LOCAL(uint32_t, u32Tmp);
5963
5964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5968
5969 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5970 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5971
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 }
5975 }
5976 return VINF_SUCCESS;
5977}
5978
5979/* Opcode 0xf3 0x0f 0x6e - invalid */
5980
5981
5982/**
5983 * @opcode 0x6f
5984 * @oppfx none
5985 * @opcpuid mmx
5986 * @opgroup og_mmx_datamove
5987 * @opxcpttype 5
5988 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5989 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5990 */
5991FNIEMOP_DEF(iemOp_movq_Pq_Qq)
5992{
5993 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5995 if (IEM_IS_MODRM_REG_MODE(bRm))
5996 {
5997 /*
5998 * Register, register.
5999 */
6000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6001 IEM_MC_BEGIN(0, 1);
6002 IEM_MC_LOCAL(uint64_t, u64Tmp);
6003
6004 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6005 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6006 IEM_MC_FPU_TO_MMX_MODE();
6007
6008 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6009 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6010
6011 IEM_MC_ADVANCE_RIP();
6012 IEM_MC_END();
6013 }
6014 else
6015 {
6016 /*
6017 * Register, memory.
6018 */
6019 IEM_MC_BEGIN(0, 2);
6020 IEM_MC_LOCAL(uint64_t, u64Tmp);
6021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6022
6023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6027 IEM_MC_FPU_TO_MMX_MODE();
6028
6029 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6030 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6031
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 }
6035 return VINF_SUCCESS;
6036}
6037
6038/**
6039 * @opcode 0x6f
6040 * @oppfx 0x66
6041 * @opcpuid sse2
6042 * @opgroup og_sse2_simdint_datamove
6043 * @opxcpttype 1
6044 * @optest op1=1 op2=2 -> op1=2
6045 * @optest op1=0 op2=-42 -> op1=-42
6046 */
6047FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6048{
6049 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6051 if (IEM_IS_MODRM_REG_MODE(bRm))
6052 {
6053 /*
6054 * Register, register.
6055 */
6056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6057 IEM_MC_BEGIN(0, 0);
6058
6059 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6060 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6061
6062 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6063 IEM_GET_MODRM_RM(pVCpu, bRm));
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 }
6067 else
6068 {
6069 /*
6070 * Register, memory.
6071 */
6072 IEM_MC_BEGIN(0, 2);
6073 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6075
6076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6078 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6079 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6080
6081 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6082 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6083
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 return VINF_SUCCESS;
6088}
6089
6090/**
6091 * @opcode 0x6f
6092 * @oppfx 0xf3
6093 * @opcpuid sse2
6094 * @opgroup og_sse2_simdint_datamove
6095 * @opxcpttype 4UA
6096 * @optest op1=1 op2=2 -> op1=2
6097 * @optest op1=0 op2=-42 -> op1=-42
6098 */
6099FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6100{
6101 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6103 if (IEM_IS_MODRM_REG_MODE(bRm))
6104 {
6105 /*
6106 * Register, register.
6107 */
6108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6109 IEM_MC_BEGIN(0, 0);
6110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6112 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6113 IEM_GET_MODRM_RM(pVCpu, bRm));
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 else
6118 {
6119 /*
6120 * Register, memory.
6121 */
6122 IEM_MC_BEGIN(0, 2);
6123 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6125
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6130 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6131 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6132
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 }
6136 return VINF_SUCCESS;
6137}
6138
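/**
 * The only difference between the two loads above is the alignment
 * requirement: movdqa raises \#GP(0) on a memory operand that is not
 * 16-byte aligned, while movdqu accepts any address.  A sketch of the check
 * implied by IEM_MC_FETCH_MEM_U128_ALIGN_SSE (hypothetical helper, ignoring
 * the MXCSR.MM and alignment-check subtleties):
 * @code
 *  static bool IsSseAligned(RTGCPTR GCPtrMem)
 *  {
 *      return (GCPtrMem & 15) == 0;  // 16-byte natural alignment
 *  }
 * @endcode
 */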
6139
6140/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6141FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6142{
6143 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6145 if (IEM_IS_MODRM_REG_MODE(bRm))
6146 {
6147 /*
6148 * Register, register.
6149 */
6150 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6152
6153 IEM_MC_BEGIN(3, 0);
6154 IEM_MC_ARG(uint64_t *, pDst, 0);
6155 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6156 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6158 IEM_MC_PREPARE_FPU_USAGE();
6159 IEM_MC_FPU_TO_MMX_MODE();
6160
6161 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6162 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6164 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6165
6166 IEM_MC_ADVANCE_RIP();
6167 IEM_MC_END();
6168 }
6169 else
6170 {
6171 /*
6172 * Register, memory.
6173 */
6174 IEM_MC_BEGIN(3, 2);
6175 IEM_MC_ARG(uint64_t *, pDst, 0);
6176 IEM_MC_LOCAL(uint64_t, uSrc);
6177 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6179
6180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6181 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6182 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6185 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6186
6187 IEM_MC_PREPARE_FPU_USAGE();
6188 IEM_MC_FPU_TO_MMX_MODE();
6189
6190 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6192 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6193
6194 IEM_MC_ADVANCE_RIP();
6195 IEM_MC_END();
6196 }
6197 return VINF_SUCCESS;
6198}
6199
6200
6201/**
6202 * Common worker for SSE2 instructions on the forms:
6203 * pshufd xmm1, xmm2/mem128, imm8
6204 * pshufhw xmm1, xmm2/mem128, imm8
6205 * pshuflw xmm1, xmm2/mem128, imm8
6206 *
6207 * Proper alignment of the 128-bit operand is enforced.
6208 * Exceptions type 4. SSE2 cpuid checks.
6209 */
6210FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6211{
6212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6213 if (IEM_IS_MODRM_REG_MODE(bRm))
6214 {
6215 /*
6216 * Register, register.
6217 */
6218 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220
6221 IEM_MC_BEGIN(3, 0);
6222 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6223 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6224 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6226 IEM_MC_PREPARE_SSE_USAGE();
6227 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6228 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6229 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 }
6233 else
6234 {
6235 /*
6236 * Register, memory.
6237 */
6238 IEM_MC_BEGIN(3, 2);
6239 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6240 IEM_MC_LOCAL(RTUINT128U, uSrc);
6241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6243
6244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6245 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6246 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6249
6250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6251 IEM_MC_PREPARE_SSE_USAGE();
6252 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6253 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6254
6255 IEM_MC_ADVANCE_RIP();
6256 IEM_MC_END();
6257 }
6258 return VINF_SUCCESS;
6259}
6260
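/**
 * The shuffle encoding handled by the worker above (and by pshufw before
 * it): each pair of immediate bits selects one source element for the
 * corresponding result lane.  Sketch of the full pshufd form; pshufhw and
 * pshuflw apply the same selectors to only the high or low four words:
 * @code
 *  #include <stdint.h>
 *  #include <string.h>
 *
 *  static void pshufd_model(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
 *  {
 *      uint32_t au32Res[4];
 *      for (unsigned i = 0; i < 4; i++)
 *          au32Res[i] = au32Src[(bImm >> (i * 2)) & 3]; // 2 bits per lane
 *      memcpy(au32Dst, au32Res, sizeof(au32Res));
 *  }
 * @endcode
 */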
6261
6262/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6263FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6264{
6265 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6266 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6267}
6268
6269
6270/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6271FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6272{
6273 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6274 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6275}
6276
6277
6278/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6279FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6280{
6281 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6282 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6283}
6284
6285
6286/**
6287 * Common worker for MMX instructions of the form:
6288 * psrlw mm, imm8
6289 * psraw mm, imm8
6290 * psllw mm, imm8
6291 * psrld mm, imm8
6292 * psrad mm, imm8
6293 * pslld mm, imm8
6294 * psrlq mm, imm8
6295 * psllq mm, imm8
6296 *
6297 */
6298FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6299{
6300 if (IEM_IS_MODRM_REG_MODE(bRm))
6301 {
6302 /*
6303 * Register, immediate.
6304 */
6305 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6307
6308 IEM_MC_BEGIN(2, 0);
6309 IEM_MC_ARG(uint64_t *, pDst, 0);
6310 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6311 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6312 IEM_MC_PREPARE_FPU_USAGE();
6313 IEM_MC_FPU_TO_MMX_MODE();
6314
6315 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6316 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6317 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6318
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 }
6322 else
6323 {
6324 /*
6325 * Register, memory not supported.
6326 */
6327 /* Unreachable: the group 12/13/14 dispatchers route memory forms to iemOp_InvalidWithRMNeedImm8. */
6328 }
6329 return VINF_SUCCESS;
6330}
6331
6332
6333/**
6334 * Common worker for SSE2 instructions of the form:
6335 * psrlw xmm, imm8
6336 * psraw xmm, imm8
6337 * psllw xmm, imm8
6338 * psrld xmm, imm8
6339 * psrad xmm, imm8
6340 * pslld xmm, imm8
6341 * psrlq xmm, imm8
6342 * psllq xmm, imm8
6343 *
6344 */
6345FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6346{
6347 if (IEM_IS_MODRM_REG_MODE(bRm))
6348 {
6349 /*
6350 * Register, immediate.
6351 */
6352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6354
6355 IEM_MC_BEGIN(2, 0);
6356 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6357 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6358 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6359 IEM_MC_PREPARE_SSE_USAGE();
6360 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6361 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 }
6365 else
6366 {
6367 /*
6368 * Register, memory not supported.
6369 */
6370 /* Unreachable: the group 12/13/14 dispatchers route memory forms to iemOp_InvalidWithRMNeedImm8. */
6371 }
6372 return VINF_SUCCESS;
6373}
6374
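/**
 * Out-of-range shift counts behave differently per family, which the
 * iemAImpl_* implementations take care of: for the logical shifts a count
 * larger than the element width zeroes the lane, while the arithmetic right
 * shifts behave as if shifted by width minus one (sign fill).  Per-lane
 * sketch for the word forms, assuming the compiler implements >> on signed
 * values as an arithmetic shift (as GCC and MSVC do):
 * @code
 *  #include <stdint.h>
 *
 *  static uint16_t psrlw_lane_model(uint16_t u16, uint8_t bShift)
 *  {
 *      return bShift <= 15 ? (uint16_t)(u16 >> bShift) : 0;
 *  }
 *
 *  static int16_t psraw_lane_model(int16_t i16, uint8_t bShift)
 *  {
 *      return (int16_t)(i16 >> (bShift <= 15 ? bShift : 15));
 *  }
 * @endcode
 */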
6375
6376/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6377FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6378{
6379// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6380 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6381}
6382
6383
6384/** Opcode 0x66 0x0f 0x71 11/2. */
6385FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6386{
6387// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6388 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6389}
6390
6391
6392/** Opcode 0x0f 0x71 11/4. */
6393FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6394{
6395// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6396 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6397}
6398
6399
6400/** Opcode 0x66 0x0f 0x71 11/4. */
6401FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6402{
6403// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6404 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6405}
6406
6407
6408/** Opcode 0x0f 0x71 11/6. */
6409FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6410{
6411// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6412 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6413}
6414
6415
6416/** Opcode 0x66 0x0f 0x71 11/6. */
6417FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6418{
6419// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6420 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6421}
6422
6423
6424/**
6425 * Group 12 jump table for register variant.
6426 */
6427IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6428{
6429 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6430 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6431 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6432 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6433 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6434 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6435 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6436 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6437};
6438AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6439
6440
6441/** Opcode 0x0f 0x71. */
6442FNIEMOP_DEF(iemOp_Grp12)
6443{
6444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6445 if (IEM_IS_MODRM_REG_MODE(bRm))
6446 /* register, register */
6447 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6448 + pVCpu->iem.s.idxPrefix], bRm);
6449 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6450}
6451
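/**
 * How these group jump tables are indexed: eight ModR/M.reg rows of four
 * prefix columns each (none, 0x66, 0xf3, 0xf2, matching the column order
 * visible in the table comments).  Worked example for 66 0F 71 /2 ib,
 * i.e. psrlw xmm, imm8:
 * @code
 *  // IEM_GET_MODRM_REG_8(bRm)  == 2   (the /2 sub-opcode)
 *  // pVCpu->iem.s.idxPrefix    == 1   (operand-size prefix column)
 *  // => g_apfnGroup12RegReg[2 * 4 + 1] == iemOp_Grp12_psrlw_Ux_Ib
 * @endcode
 */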
6452
6453/** Opcode 0x0f 0x72 11/2. */
6454FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6455{
6456// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6457 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6458}
6459
6460
6461/** Opcode 0x66 0x0f 0x72 11/2. */
6462FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6463{
6464// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6465 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6466}
6467
6468
6469/** Opcode 0x0f 0x72 11/4. */
6470FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6471{
6472// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6473 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6474}
6475
6476
6477/** Opcode 0x66 0x0f 0x72 11/4. */
6478FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6479{
6480// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6481 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6482}
6483
6484
6485/** Opcode 0x0f 0x72 11/6. */
6486FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6487{
6488// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6489 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6490}
6491
6492/** Opcode 0x66 0x0f 0x72 11/6. */
6493FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6494{
6495// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6496 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6497}
6498
6499
6500/**
6501 * Group 13 jump table for register variant.
6502 */
6503IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6504{
6505 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6506 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6507 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6508 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6509 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6510 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6511 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6512 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6513};
6514AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6515
6516/** Opcode 0x0f 0x72. */
6517FNIEMOP_DEF(iemOp_Grp13)
6518{
6519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6520 if (IEM_IS_MODRM_REG_MODE(bRm))
6521 /* register, register */
6522 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6523 + pVCpu->iem.s.idxPrefix], bRm);
6524 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6525}
6526
6527
6528/** Opcode 0x0f 0x73 11/2. */
6529FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6530{
6531// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6532 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6533}
6534
6535
6536/** Opcode 0x66 0x0f 0x73 11/2. */
6537FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6538{
6539// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6540 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6541}
6542
6543
6544/** Opcode 0x66 0x0f 0x73 11/3. */
6545FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6546{
6547// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6548 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6549}
6550
6551
6552/** Opcode 0x0f 0x73 11/6. */
6553FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6554{
6555// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6556 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6557}
6558
6559
6560/** Opcode 0x66 0x0f 0x73 11/6. */
6561FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6562{
6563// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6564 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6565}
6566
6567
6568/** Opcode 0x66 0x0f 0x73 11/7. */
6569FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6570{
6571// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6572 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6573}
6574
6575/**
6576 * Group 14 jump table for register variant.
6577 */
6578IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6579{
6580 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6581 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6582 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6583 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6584 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6585 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6586 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6587 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6588};
6589AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6590
6591
6592/** Opcode 0x0f 0x73. */
6593FNIEMOP_DEF(iemOp_Grp14)
6594{
6595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6596 if (IEM_IS_MODRM_REG_MODE(bRm))
6597 /* register, register */
6598 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6599 + pVCpu->iem.s.idxPrefix], bRm);
6600 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6601}
6602
6603
6604/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6605FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6606{
6607 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6608 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6609}
6610
6611
6612/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6613FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6614{
6615 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6616 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6617}
6618
6619
6620/* Opcode 0xf3 0x0f 0x74 - invalid */
6621/* Opcode 0xf2 0x0f 0x74 - invalid */
6622
6623
6624/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6625FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6626{
6627 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6628 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6629}
6630
6631
6632/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6633FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6634{
6635 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6636 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6637}
6638
6639
6640/* Opcode 0xf3 0x0f 0x75 - invalid */
6641/* Opcode 0xf2 0x0f 0x75 - invalid */
6642
6643
6644/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6645FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6646{
6647 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6648 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6649}
6650
6651
6652/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6653FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6654{
6655 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6656 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6657}
6658
6659
6660/* Opcode 0xf3 0x0f 0x76 - invalid */
6661/* Opcode 0xf2 0x0f 0x76 - invalid */
6662
6663
6664/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6665FNIEMOP_DEF(iemOp_emms)
6666{
6667 IEMOP_MNEMONIC(emms, "emms");
6668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6669
6670 IEM_MC_BEGIN(0, 0);
6671 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6672 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6673 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6674 IEM_MC_FPU_FROM_MMX_MODE();
6675 IEM_MC_ADVANCE_RIP();
6676 IEM_MC_END();
6677 return VINF_SUCCESS;
6678}
6679
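/**
 * What leaving MMX mode amounts to: all eight x87 registers are tagged
 * empty again so that following FPU code starts with a clean stack.  In the
 * abridged tag format kept by FXSAVE (one valid bit per register) that is
 * simply (sketch, assuming an X86FXSTATE-style context; hypothetical field
 * access):
 * @code
 *  pFpuCtx->FTW = 0;   // all eight registers empty
 * @endcode
 */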
6680/* Opcode 0x66 0x0f 0x77 - invalid */
6681/* Opcode 0xf3 0x0f 0x77 - invalid */
6682/* Opcode 0xf2 0x0f 0x77 - invalid */
6683
6684/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6685#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6686FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6687{
6688 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6689 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6690 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6691 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6692
6693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6694 if (IEM_IS_MODRM_REG_MODE(bRm))
6695 {
6696 /*
6697 * Register, register.
6698 */
6699 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6700 if (enmEffOpSize == IEMMODE_64BIT)
6701 {
6702 IEM_MC_BEGIN(2, 0);
6703 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6704 IEM_MC_ARG(uint64_t, u64Enc, 1);
6705 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6706 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6707 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6708 IEM_MC_END();
6709 }
6710 else
6711 {
6712 IEM_MC_BEGIN(2, 0);
6713 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6714 IEM_MC_ARG(uint32_t, u32Enc, 1);
6715 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6716 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6717 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
6718 IEM_MC_END();
6719 }
6720 }
6721 else
6722 {
6723 /*
6724 * Memory, register.
6725 */
6726 if (enmEffOpSize == IEMMODE_64BIT)
6727 {
6728 IEM_MC_BEGIN(3, 0);
6729 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6730 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6731 IEM_MC_ARG(uint64_t, u64Enc, 2);
6732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6733 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6734 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6735 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6736 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6737 IEM_MC_END();
6738 }
6739 else
6740 {
6741 IEM_MC_BEGIN(3, 0);
6742 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6743 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6744 IEM_MC_ARG(uint32_t, u32Enc, 2);
6745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6746 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6747 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6748 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6749 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6750 IEM_MC_END();
6751 }
6752 }
6753 return VINF_SUCCESS;
6754}
6755#else
6756FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
6757#endif
6758
6759/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6760FNIEMOP_STUB(iemOp_AmdGrp17);
6761/* Opcode 0xf3 0x0f 0x78 - invalid */
6762/* Opcode 0xf2 0x0f 0x78 - invalid */
6763
6764/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6765#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6766FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6767{
6768 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6769 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6770 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6771 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6772
6773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6774 if (IEM_IS_MODRM_REG_MODE(bRm))
6775 {
6776 /*
6777 * Register, register.
6778 */
6779 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6780 if (enmEffOpSize == IEMMODE_64BIT)
6781 {
6782 IEM_MC_BEGIN(2, 0);
6783 IEM_MC_ARG(uint64_t, u64Val, 0);
6784 IEM_MC_ARG(uint64_t, u64Enc, 1);
6785 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6786 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6787 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
6788 IEM_MC_END();
6789 }
6790 else
6791 {
6792 IEM_MC_BEGIN(2, 0);
6793 IEM_MC_ARG(uint32_t, u32Val, 0);
6794 IEM_MC_ARG(uint32_t, u32Enc, 1);
6795 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6796 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6797 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
6798 IEM_MC_END();
6799 }
6800 }
6801 else
6802 {
6803 /*
6804 * Register, memory.
6805 */
6806 if (enmEffOpSize == IEMMODE_64BIT)
6807 {
6808 IEM_MC_BEGIN(3, 0);
6809 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6810 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6811 IEM_MC_ARG(uint64_t, u64Enc, 2);
6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6813 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6814 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6815 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6816 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
6817 IEM_MC_END();
6818 }
6819 else
6820 {
6821 IEM_MC_BEGIN(3, 0);
6822 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6823 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6824 IEM_MC_ARG(uint32_t, u32Enc, 2);
6825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6826 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6827 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6829 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
6830 IEM_MC_END();
6831 }
6832 }
6833 return VINF_SUCCESS;
6834}
6835#else
6836FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
6837#endif
6838/* Opcode 0x66 0x0f 0x79 - invalid */
6839/* Opcode 0xf3 0x0f 0x79 - invalid */
6840/* Opcode 0xf2 0x0f 0x79 - invalid */
6841
6842/* Opcode 0x0f 0x7a - invalid */
6843/* Opcode 0x66 0x0f 0x7a - invalid */
6844/* Opcode 0xf3 0x0f 0x7a - invalid */
6845/* Opcode 0xf2 0x0f 0x7a - invalid */
6846
6847/* Opcode 0x0f 0x7b - invalid */
6848/* Opcode 0x66 0x0f 0x7b - invalid */
6849/* Opcode 0xf3 0x0f 0x7b - invalid */
6850/* Opcode 0xf2 0x0f 0x7b - invalid */
6851
6852/* Opcode 0x0f 0x7c - invalid */
6853
6854
6855/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
6856FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
6857{
6858 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6859 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
6860}
6861
6862
6863/* Opcode 0xf3 0x0f 0x7c - invalid */
6864
6865
6866/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
6867FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
6868{
6869 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6870 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
6871}
6872
6873
6874/* Opcode 0x0f 0x7d - invalid */
6875
6876
6877/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
6878FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
6879{
6880 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6881 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
6882}
6883
6884
6885/* Opcode 0xf3 0x0f 0x7d - invalid */
6886
6887
6888/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
6889FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
6890{
6891 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6892 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
6893}
6894
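/**
 * The horizontal operations above pair up adjacent lanes within each
 * operand instead of combining corresponding lanes.  Sketch of haddps;
 * hsubps is the same with subtraction:
 * @code
 *  static void haddps_model(float ar32Dst[4], float const ar32Src[4])
 *  {
 *      float const ar32Old[4] = { ar32Dst[0], ar32Dst[1], ar32Dst[2], ar32Dst[3] };
 *      ar32Dst[0] = ar32Old[0] + ar32Old[1];   // destination pairs first,
 *      ar32Dst[1] = ar32Old[2] + ar32Old[3];
 *      ar32Dst[2] = ar32Src[0] + ar32Src[1];   // then the source pairs.
 *      ar32Dst[3] = ar32Src[2] + ar32Src[3];
 *  }
 * @endcode
 */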
6895
6896/** Opcode 0x0f 0x7e - movd/movq Ey, Pd */
6897FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
6898{
6899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6900 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6901 {
6902 /**
6903 * @opcode 0x7e
6904 * @opcodesub rex.w=1
6905 * @oppfx none
6906 * @opcpuid mmx
6907 * @opgroup og_mmx_datamove
6908 * @opxcpttype 5
6909 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6910 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6911 */
6912 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6913 if (IEM_IS_MODRM_REG_MODE(bRm))
6914 {
6915 /* greg64, MMX */
6916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6917 IEM_MC_BEGIN(0, 1);
6918 IEM_MC_LOCAL(uint64_t, u64Tmp);
6919
6920 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6921 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6922 IEM_MC_FPU_TO_MMX_MODE();
6923
6924 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6925 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
6926
6927 IEM_MC_ADVANCE_RIP();
6928 IEM_MC_END();
6929 }
6930 else
6931 {
6932 /* [mem64], MMX */
6933 IEM_MC_BEGIN(0, 2);
6934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6935 IEM_MC_LOCAL(uint64_t, u64Tmp);
6936
6937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6939 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6940 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6941 IEM_MC_FPU_TO_MMX_MODE();
6942
6943 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6944 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
6945
6946 IEM_MC_ADVANCE_RIP();
6947 IEM_MC_END();
6948 }
6949 }
6950 else
6951 {
6952 /**
6953 * @opdone
6954 * @opcode 0x7e
6955 * @opcodesub rex.w=0
6956 * @oppfx none
6957 * @opcpuid mmx
6958 * @opgroup og_mmx_datamove
6959 * @opxcpttype 5
6960 * @opfunction iemOp_movd_q_Ey_Pd
6961 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6962 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6963 */
6964 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6965 if (IEM_IS_MODRM_REG_MODE(bRm))
6966 {
6967 /* greg32, MMX */
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 IEM_MC_BEGIN(0, 1);
6970 IEM_MC_LOCAL(uint32_t, u32Tmp);
6971
6972 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6973 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6974 IEM_MC_FPU_TO_MMX_MODE();
6975
6976 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6977 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
6978
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 }
6982 else
6983 {
6984 /* [mem32], MMX */
6985 IEM_MC_BEGIN(0, 2);
6986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6987 IEM_MC_LOCAL(uint32_t, u32Tmp);
6988
6989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6991 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6992 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6993 IEM_MC_FPU_TO_MMX_MODE();
6994
6995 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6996 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
6997
6998 IEM_MC_ADVANCE_RIP();
6999 IEM_MC_END();
7000 }
7001 }
7002 return VINF_SUCCESS;
7003
7004}
7005
7006
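/** Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy */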
7007FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7008{
7009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7010 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7011 {
7012 /**
7013 * @opcode 0x7e
7014 * @opcodesub rex.w=1
7015 * @oppfx 0x66
7016 * @opcpuid sse2
7017 * @opgroup og_sse2_simdint_datamove
7018 * @opxcpttype 5
7019 * @optest 64-bit / op1=1 op2=2 -> op1=2
7020 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7021 */
7022 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7023 if (IEM_IS_MODRM_REG_MODE(bRm))
7024 {
7025 /* greg64, XMM */
7026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7027 IEM_MC_BEGIN(0, 1);
7028 IEM_MC_LOCAL(uint64_t, u64Tmp);
7029
7030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7032
7033 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7034 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7035
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 }
7039 else
7040 {
7041 /* [mem64], XMM */
7042 IEM_MC_BEGIN(0, 2);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7044 IEM_MC_LOCAL(uint64_t, u64Tmp);
7045
7046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7048 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7049 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7050
7051 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7052 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7053
7054 IEM_MC_ADVANCE_RIP();
7055 IEM_MC_END();
7056 }
7057 }
7058 else
7059 {
7060 /**
7061 * @opdone
7062 * @opcode 0x7e
7063 * @opcodesub rex.w=0
7064 * @oppfx 0x66
7065 * @opcpuid sse2
7066 * @opgroup og_sse2_simdint_datamove
7067 * @opxcpttype 5
7068 * @opfunction iemOp_movd_q_Ey_Vy
7069 * @optest op1=1 op2=2 -> op1=2
7070 * @optest op1=0 op2=-42 -> op1=-42
7071 */
7072 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7073 if (IEM_IS_MODRM_REG_MODE(bRm))
7074 {
7075 /* greg32, XMM */
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077 IEM_MC_BEGIN(0, 1);
7078 IEM_MC_LOCAL(uint32_t, u32Tmp);
7079
7080 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7082
7083 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7084 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7085
7086 IEM_MC_ADVANCE_RIP();
7087 IEM_MC_END();
7088 }
7089 else
7090 {
7091 /* [mem32], XMM */
7092 IEM_MC_BEGIN(0, 2);
7093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7094 IEM_MC_LOCAL(uint32_t, u32Tmp);
7095
7096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7099 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7100
7101 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7102 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7103
7104 IEM_MC_ADVANCE_RIP();
7105 IEM_MC_END();
7106 }
7107 }
7108 return VINF_SUCCESS;
7109
7110}
7111
7112/**
7113 * @opcode 0x7e
7114 * @oppfx 0xf3
7115 * @opcpuid sse2
7116 * @opgroup og_sse2_pcksclr_datamove
7117 * @opxcpttype none
7118 * @optest op1=1 op2=2 -> op1=2
7119 * @optest op1=0 op2=-42 -> op1=-42
7120 */
7121FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7122{
7123 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125 if (IEM_IS_MODRM_REG_MODE(bRm))
7126 {
7127 /*
7128 * Register, register.
7129 */
7130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7131 IEM_MC_BEGIN(0, 2);
7132 IEM_MC_LOCAL(uint64_t, uSrc);
7133
7134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7136
7137 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7138 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7139
7140 IEM_MC_ADVANCE_RIP();
7141 IEM_MC_END();
7142 }
7143 else
7144 {
7145 /*
7146 * Register, memory.
7147 */
7148 IEM_MC_BEGIN(0, 2);
7149 IEM_MC_LOCAL(uint64_t, uSrc);
7150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7151
7152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7156
7157 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7158 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7159
7160 IEM_MC_ADVANCE_RIP();
7161 IEM_MC_END();
7162 }
7163 return VINF_SUCCESS;
7164}
7165
7166/* Opcode 0xf2 0x0f 0x7e - invalid */
7167
7168
7169/** Opcode 0x0f 0x7f - movq Qq, Pq */
7170FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7171{
7172 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7174 if (IEM_IS_MODRM_REG_MODE(bRm))
7175 {
7176 /*
7177 * Register, register.
7178 */
7179 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7180 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7182 IEM_MC_BEGIN(0, 1);
7183 IEM_MC_LOCAL(uint64_t, u64Tmp);
7184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7185 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7186 IEM_MC_FPU_TO_MMX_MODE();
7187
7188 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7189 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7190
7191 IEM_MC_ADVANCE_RIP();
7192 IEM_MC_END();
7193 }
7194 else
7195 {
7196 /*
7197 * Memory, register.
7198 */
7199 IEM_MC_BEGIN(0, 2);
7200 IEM_MC_LOCAL(uint64_t, u64Tmp);
7201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7202
7203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7205 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7206 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7207 IEM_MC_FPU_TO_MMX_MODE();
7208
7209 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7210 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7211
7212 IEM_MC_ADVANCE_RIP();
7213 IEM_MC_END();
7214 }
7215 return VINF_SUCCESS;
7216}
7217
7218/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7219FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7220{
7221 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7223 if (IEM_IS_MODRM_REG_MODE(bRm))
7224 {
7225 /*
7226 * Register, register.
7227 */
7228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7229 IEM_MC_BEGIN(0, 0);
7230 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7231 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7232 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7233 IEM_GET_MODRM_REG(pVCpu, bRm));
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 }
7237 else
7238 {
7239 /*
7240 * Memory, register.
7241 */
7242 IEM_MC_BEGIN(0, 2);
7243 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7245
7246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7250
7251 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7252 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7253
7254 IEM_MC_ADVANCE_RIP();
7255 IEM_MC_END();
7256 }
7257 return VINF_SUCCESS;
7258}
7259
7260/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7261FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7262{
7263 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7265 if (IEM_IS_MODRM_REG_MODE(bRm))
7266 {
7267 /*
7268 * Register, register.
7269 */
7270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7271 IEM_MC_BEGIN(0, 0);
7272 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7273 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7274 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7275 IEM_GET_MODRM_REG(pVCpu, bRm));
7276 IEM_MC_ADVANCE_RIP();
7277 IEM_MC_END();
7278 }
7279 else
7280 {
7281 /*
7282 * Memory, register.
7283 */
7284 IEM_MC_BEGIN(0, 2);
7285 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7287
7288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7290 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7292
7293 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7294 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7295
7296 IEM_MC_ADVANCE_RIP();
7297 IEM_MC_END();
7298 }
7299 return VINF_SUCCESS;
7300}
7301
7302/* Opcode 0xf2 0x0f 0x7f - invalid */
7303
7304
7305
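/*
 * The long Jcc forms, 0x0f 0x80 thru 0x0f 0x8f, introduced with the 386.
 * The displacement is a signed 16-bit or 32-bit immediate selected by the
 * effective operand size; in 64-bit mode the operand size defaults to
 * 64-bit and the 32-bit displacement is sign-extended when added to RIP.
 */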
7306/** Opcode 0x0f 0x80. */
7307FNIEMOP_DEF(iemOp_jo_Jv)
7308{
7309 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7310 IEMOP_HLP_MIN_386();
7311 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7312 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7313 {
7314 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7316
7317 IEM_MC_BEGIN(0, 0);
7318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7319 IEM_MC_REL_JMP_S16(i16Imm);
7320 } IEM_MC_ELSE() {
7321 IEM_MC_ADVANCE_RIP();
7322 } IEM_MC_ENDIF();
7323 IEM_MC_END();
7324 }
7325 else
7326 {
7327 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7329
7330 IEM_MC_BEGIN(0, 0);
7331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7332 IEM_MC_REL_JMP_S32(i32Imm);
7333 } IEM_MC_ELSE() {
7334 IEM_MC_ADVANCE_RIP();
7335 } IEM_MC_ENDIF();
7336 IEM_MC_END();
7337 }
7338 return VINF_SUCCESS;
7339}
7340
7341
7342/** Opcode 0x0f 0x81. */
7343FNIEMOP_DEF(iemOp_jno_Jv)
7344{
7345 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7346 IEMOP_HLP_MIN_386();
7347 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7348 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7349 {
7350 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7352
7353 IEM_MC_BEGIN(0, 0);
7354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7355 IEM_MC_ADVANCE_RIP();
7356 } IEM_MC_ELSE() {
7357 IEM_MC_REL_JMP_S16(i16Imm);
7358 } IEM_MC_ENDIF();
7359 IEM_MC_END();
7360 }
7361 else
7362 {
7363 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7365
7366 IEM_MC_BEGIN(0, 0);
7367 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7368 IEM_MC_ADVANCE_RIP();
7369 } IEM_MC_ELSE() {
7370 IEM_MC_REL_JMP_S32(i32Imm);
7371 } IEM_MC_ENDIF();
7372 IEM_MC_END();
7373 }
7374 return VINF_SUCCESS;
7375}
7376
7377
7378/** Opcode 0x0f 0x82. */
7379FNIEMOP_DEF(iemOp_jc_Jv)
7380{
7381 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7382 IEMOP_HLP_MIN_386();
7383 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7384 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7385 {
7386 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7388
7389 IEM_MC_BEGIN(0, 0);
7390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7391 IEM_MC_REL_JMP_S16(i16Imm);
7392 } IEM_MC_ELSE() {
7393 IEM_MC_ADVANCE_RIP();
7394 } IEM_MC_ENDIF();
7395 IEM_MC_END();
7396 }
7397 else
7398 {
7399 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7401
7402 IEM_MC_BEGIN(0, 0);
7403 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7404 IEM_MC_REL_JMP_S32(i32Imm);
7405 } IEM_MC_ELSE() {
7406 IEM_MC_ADVANCE_RIP();
7407 } IEM_MC_ENDIF();
7408 IEM_MC_END();
7409 }
7410 return VINF_SUCCESS;
7411}
7412
7413
7414/** Opcode 0x0f 0x83. */
7415FNIEMOP_DEF(iemOp_jnc_Jv)
7416{
7417 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7418 IEMOP_HLP_MIN_386();
7419 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7420 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7421 {
7422 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7424
7425 IEM_MC_BEGIN(0, 0);
7426 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7427 IEM_MC_ADVANCE_RIP();
7428 } IEM_MC_ELSE() {
7429 IEM_MC_REL_JMP_S16(i16Imm);
7430 } IEM_MC_ENDIF();
7431 IEM_MC_END();
7432 }
7433 else
7434 {
7435 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7437
7438 IEM_MC_BEGIN(0, 0);
7439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7440 IEM_MC_ADVANCE_RIP();
7441 } IEM_MC_ELSE() {
7442 IEM_MC_REL_JMP_S32(i32Imm);
7443 } IEM_MC_ENDIF();
7444 IEM_MC_END();
7445 }
7446 return VINF_SUCCESS;
7447}
7448
7449
7450/** Opcode 0x0f 0x84. */
7451FNIEMOP_DEF(iemOp_je_Jv)
7452{
7453 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7454 IEMOP_HLP_MIN_386();
7455 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7456 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7457 {
7458 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7460
7461 IEM_MC_BEGIN(0, 0);
7462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7463 IEM_MC_REL_JMP_S16(i16Imm);
7464 } IEM_MC_ELSE() {
7465 IEM_MC_ADVANCE_RIP();
7466 } IEM_MC_ENDIF();
7467 IEM_MC_END();
7468 }
7469 else
7470 {
7471 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7473
7474 IEM_MC_BEGIN(0, 0);
7475 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7476 IEM_MC_REL_JMP_S32(i32Imm);
7477 } IEM_MC_ELSE() {
7478 IEM_MC_ADVANCE_RIP();
7479 } IEM_MC_ENDIF();
7480 IEM_MC_END();
7481 }
7482 return VINF_SUCCESS;
7483}
7484
7485
7486/** Opcode 0x0f 0x85. */
7487FNIEMOP_DEF(iemOp_jne_Jv)
7488{
7489 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7490 IEMOP_HLP_MIN_386();
7491 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7492 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7493 {
7494 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7496
7497 IEM_MC_BEGIN(0, 0);
7498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7499 IEM_MC_ADVANCE_RIP();
7500 } IEM_MC_ELSE() {
7501 IEM_MC_REL_JMP_S16(i16Imm);
7502 } IEM_MC_ENDIF();
7503 IEM_MC_END();
7504 }
7505 else
7506 {
7507 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7509
7510 IEM_MC_BEGIN(0, 0);
7511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7512 IEM_MC_ADVANCE_RIP();
7513 } IEM_MC_ELSE() {
7514 IEM_MC_REL_JMP_S32(i32Imm);
7515 } IEM_MC_ENDIF();
7516 IEM_MC_END();
7517 }
7518 return VINF_SUCCESS;
7519}
7520
7521
7522/** Opcode 0x0f 0x86. */
7523FNIEMOP_DEF(iemOp_jbe_Jv)
7524{
7525 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7526 IEMOP_HLP_MIN_386();
7527 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7528 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7529 {
7530 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7532
7533 IEM_MC_BEGIN(0, 0);
7534 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7535 IEM_MC_REL_JMP_S16(i16Imm);
7536 } IEM_MC_ELSE() {
7537 IEM_MC_ADVANCE_RIP();
7538 } IEM_MC_ENDIF();
7539 IEM_MC_END();
7540 }
7541 else
7542 {
7543 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7545
7546 IEM_MC_BEGIN(0, 0);
7547 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7548 IEM_MC_REL_JMP_S32(i32Imm);
7549 } IEM_MC_ELSE() {
7550 IEM_MC_ADVANCE_RIP();
7551 } IEM_MC_ENDIF();
7552 IEM_MC_END();
7553 }
7554 return VINF_SUCCESS;
7555}
7556
7557
7558/** Opcode 0x0f 0x87. */
7559FNIEMOP_DEF(iemOp_jnbe_Jv)
7560{
7561 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7562 IEMOP_HLP_MIN_386();
7563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7564 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7565 {
7566 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7568
7569 IEM_MC_BEGIN(0, 0);
7570 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7571 IEM_MC_ADVANCE_RIP();
7572 } IEM_MC_ELSE() {
7573 IEM_MC_REL_JMP_S16(i16Imm);
7574 } IEM_MC_ENDIF();
7575 IEM_MC_END();
7576 }
7577 else
7578 {
7579 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7581
7582 IEM_MC_BEGIN(0, 0);
7583 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7584 IEM_MC_ADVANCE_RIP();
7585 } IEM_MC_ELSE() {
7586 IEM_MC_REL_JMP_S32(i32Imm);
7587 } IEM_MC_ENDIF();
7588 IEM_MC_END();
7589 }
7590 return VINF_SUCCESS;
7591}
7592
7593
7594/** Opcode 0x0f 0x88. */
7595FNIEMOP_DEF(iemOp_js_Jv)
7596{
7597 IEMOP_MNEMONIC(js_Jv, "js Jv");
7598 IEMOP_HLP_MIN_386();
7599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7600 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7601 {
7602 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7604
7605 IEM_MC_BEGIN(0, 0);
7606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7607 IEM_MC_REL_JMP_S16(i16Imm);
7608 } IEM_MC_ELSE() {
7609 IEM_MC_ADVANCE_RIP();
7610 } IEM_MC_ENDIF();
7611 IEM_MC_END();
7612 }
7613 else
7614 {
7615 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7617
7618 IEM_MC_BEGIN(0, 0);
7619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7620 IEM_MC_REL_JMP_S32(i32Imm);
7621 } IEM_MC_ELSE() {
7622 IEM_MC_ADVANCE_RIP();
7623 } IEM_MC_ENDIF();
7624 IEM_MC_END();
7625 }
7626 return VINF_SUCCESS;
7627}
7628
7629
7630/** Opcode 0x0f 0x89. */
7631FNIEMOP_DEF(iemOp_jns_Jv)
7632{
7633 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7634 IEMOP_HLP_MIN_386();
7635 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7636 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7637 {
7638 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640
7641 IEM_MC_BEGIN(0, 0);
7642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7643 IEM_MC_ADVANCE_RIP();
7644 } IEM_MC_ELSE() {
7645 IEM_MC_REL_JMP_S16(i16Imm);
7646 } IEM_MC_ENDIF();
7647 IEM_MC_END();
7648 }
7649 else
7650 {
7651 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7653
7654 IEM_MC_BEGIN(0, 0);
7655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7656 IEM_MC_ADVANCE_RIP();
7657 } IEM_MC_ELSE() {
7658 IEM_MC_REL_JMP_S32(i32Imm);
7659 } IEM_MC_ENDIF();
7660 IEM_MC_END();
7661 }
7662 return VINF_SUCCESS;
7663}
7664
7665
7666/** Opcode 0x0f 0x8a. */
7667FNIEMOP_DEF(iemOp_jp_Jv)
7668{
7669 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7670 IEMOP_HLP_MIN_386();
7671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7672 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7673 {
7674 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676
7677 IEM_MC_BEGIN(0, 0);
7678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7679 IEM_MC_REL_JMP_S16(i16Imm);
7680 } IEM_MC_ELSE() {
7681 IEM_MC_ADVANCE_RIP();
7682 } IEM_MC_ENDIF();
7683 IEM_MC_END();
7684 }
7685 else
7686 {
7687 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7689
7690 IEM_MC_BEGIN(0, 0);
7691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7692 IEM_MC_REL_JMP_S32(i32Imm);
7693 } IEM_MC_ELSE() {
7694 IEM_MC_ADVANCE_RIP();
7695 } IEM_MC_ENDIF();
7696 IEM_MC_END();
7697 }
7698 return VINF_SUCCESS;
7699}
7700
7701
7702/** Opcode 0x0f 0x8b. */
7703FNIEMOP_DEF(iemOp_jnp_Jv)
7704{
7705 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7706 IEMOP_HLP_MIN_386();
7707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7708 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7709 {
7710 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7712
7713 IEM_MC_BEGIN(0, 0);
7714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7715 IEM_MC_ADVANCE_RIP();
7716 } IEM_MC_ELSE() {
7717 IEM_MC_REL_JMP_S16(i16Imm);
7718 } IEM_MC_ENDIF();
7719 IEM_MC_END();
7720 }
7721 else
7722 {
7723 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7725
7726 IEM_MC_BEGIN(0, 0);
7727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7728 IEM_MC_ADVANCE_RIP();
7729 } IEM_MC_ELSE() {
7730 IEM_MC_REL_JMP_S32(i32Imm);
7731 } IEM_MC_ENDIF();
7732 IEM_MC_END();
7733 }
7734 return VINF_SUCCESS;
7735}
7736
7737
7738/** Opcode 0x0f 0x8c. */
7739FNIEMOP_DEF(iemOp_jl_Jv)
7740{
7741 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7742 IEMOP_HLP_MIN_386();
7743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7744 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7745 {
7746 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748
7749 IEM_MC_BEGIN(0, 0);
7750 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7751 IEM_MC_REL_JMP_S16(i16Imm);
7752 } IEM_MC_ELSE() {
7753 IEM_MC_ADVANCE_RIP();
7754 } IEM_MC_ENDIF();
7755 IEM_MC_END();
7756 }
7757 else
7758 {
7759 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7761
7762 IEM_MC_BEGIN(0, 0);
7763 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7764 IEM_MC_REL_JMP_S32(i32Imm);
7765 } IEM_MC_ELSE() {
7766 IEM_MC_ADVANCE_RIP();
7767 } IEM_MC_ENDIF();
7768 IEM_MC_END();
7769 }
7770 return VINF_SUCCESS;
7771}
7772
7773
7774/** Opcode 0x0f 0x8d. */
7775FNIEMOP_DEF(iemOp_jnl_Jv)
7776{
7777 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7778 IEMOP_HLP_MIN_386();
7779 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7780 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7781 {
7782 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784
7785 IEM_MC_BEGIN(0, 0);
7786 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7787 IEM_MC_ADVANCE_RIP();
7788 } IEM_MC_ELSE() {
7789 IEM_MC_REL_JMP_S16(i16Imm);
7790 } IEM_MC_ENDIF();
7791 IEM_MC_END();
7792 }
7793 else
7794 {
7795 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7797
7798 IEM_MC_BEGIN(0, 0);
7799 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7800 IEM_MC_ADVANCE_RIP();
7801 } IEM_MC_ELSE() {
7802 IEM_MC_REL_JMP_S32(i32Imm);
7803 } IEM_MC_ENDIF();
7804 IEM_MC_END();
7805 }
7806 return VINF_SUCCESS;
7807}
7808
7809
7810/** Opcode 0x0f 0x8e. */
7811FNIEMOP_DEF(iemOp_jle_Jv)
7812{
7813 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7814 IEMOP_HLP_MIN_386();
7815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7816 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7817 {
7818 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7820
7821 IEM_MC_BEGIN(0, 0);
7822 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7823 IEM_MC_REL_JMP_S16(i16Imm);
7824 } IEM_MC_ELSE() {
7825 IEM_MC_ADVANCE_RIP();
7826 } IEM_MC_ENDIF();
7827 IEM_MC_END();
7828 }
7829 else
7830 {
7831 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7833
7834 IEM_MC_BEGIN(0, 0);
7835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7836 IEM_MC_REL_JMP_S32(i32Imm);
7837 } IEM_MC_ELSE() {
7838 IEM_MC_ADVANCE_RIP();
7839 } IEM_MC_ENDIF();
7840 IEM_MC_END();
7841 }
7842 return VINF_SUCCESS;
7843}
7844
7845
7846/** Opcode 0x0f 0x8f. */
7847FNIEMOP_DEF(iemOp_jnle_Jv)
7848{
7849 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7850 IEMOP_HLP_MIN_386();
7851 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7852 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7853 {
7854 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7856
7857 IEM_MC_BEGIN(0, 0);
7858 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7859 IEM_MC_ADVANCE_RIP();
7860 } IEM_MC_ELSE() {
7861 IEM_MC_REL_JMP_S16(i16Imm);
7862 } IEM_MC_ENDIF();
7863 IEM_MC_END();
7864 }
7865 else
7866 {
7867 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7869
7870 IEM_MC_BEGIN(0, 0);
7871 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7872 IEM_MC_ADVANCE_RIP();
7873 } IEM_MC_ELSE() {
7874 IEM_MC_REL_JMP_S32(i32Imm);
7875 } IEM_MC_ENDIF();
7876 IEM_MC_END();
7877 }
7878 return VINF_SUCCESS;
7879}
7880
7881
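/*
 * The SETcc instructions, 0x0f 0x90 thru 0x0f 0x9f, likewise 386+. Each
 * stores a single byte, 1 or 0 according to the condition, to the ModR/M
 * encoded register or memory operand; the reg field is currently ignored
 * (see the @todo in each worker).
 */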
7882/** Opcode 0x0f 0x90. */
7883FNIEMOP_DEF(iemOp_seto_Eb)
7884{
7885 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
7886 IEMOP_HLP_MIN_386();
7887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7888
7889 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7890 * any way. AMD says it's "unused", whatever that means. We're
7891 * ignoring for now. */
7892 if (IEM_IS_MODRM_REG_MODE(bRm))
7893 {
7894 /* register target */
7895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7896 IEM_MC_BEGIN(0, 0);
7897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7898 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7899 } IEM_MC_ELSE() {
7900 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7901 } IEM_MC_ENDIF();
7902 IEM_MC_ADVANCE_RIP();
7903 IEM_MC_END();
7904 }
7905 else
7906 {
7907 /* memory target */
7908 IEM_MC_BEGIN(0, 1);
7909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7914 } IEM_MC_ELSE() {
7915 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7916 } IEM_MC_ENDIF();
7917 IEM_MC_ADVANCE_RIP();
7918 IEM_MC_END();
7919 }
7920 return VINF_SUCCESS;
7921}
7922
7923
7924/** Opcode 0x0f 0x91. */
7925FNIEMOP_DEF(iemOp_setno_Eb)
7926{
7927 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
7928 IEMOP_HLP_MIN_386();
7929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7930
7931 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7932 * any way. AMD says it's "unused", whatever that means. We're
7933 * ignoring for now. */
7934 if (IEM_IS_MODRM_REG_MODE(bRm))
7935 {
7936 /* register target */
7937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7938 IEM_MC_BEGIN(0, 0);
7939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7940 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7941 } IEM_MC_ELSE() {
7942 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7943 } IEM_MC_ENDIF();
7944 IEM_MC_ADVANCE_RIP();
7945 IEM_MC_END();
7946 }
7947 else
7948 {
7949 /* memory target */
7950 IEM_MC_BEGIN(0, 1);
7951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7955 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7956 } IEM_MC_ELSE() {
7957 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7958 } IEM_MC_ENDIF();
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 }
7962 return VINF_SUCCESS;
7963}
7964
7965
7966/** Opcode 0x0f 0x92. */
7967FNIEMOP_DEF(iemOp_setc_Eb)
7968{
7969 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
7970 IEMOP_HLP_MIN_386();
7971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7972
7973 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7974 * any way. AMD says it's "unused", whatever that means. We're
7975 * ignoring for now. */
7976 if (IEM_IS_MODRM_REG_MODE(bRm))
7977 {
7978 /* register target */
7979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7980 IEM_MC_BEGIN(0, 0);
7981 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7982 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7983 } IEM_MC_ELSE() {
7984 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7985 } IEM_MC_ENDIF();
7986 IEM_MC_ADVANCE_RIP();
7987 IEM_MC_END();
7988 }
7989 else
7990 {
7991 /* memory target */
7992 IEM_MC_BEGIN(0, 1);
7993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7997 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7998 } IEM_MC_ELSE() {
7999 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8000 } IEM_MC_ENDIF();
8001 IEM_MC_ADVANCE_RIP();
8002 IEM_MC_END();
8003 }
8004 return VINF_SUCCESS;
8005}
8006
8007
8008/** Opcode 0x0f 0x93. */
8009FNIEMOP_DEF(iemOp_setnc_Eb)
8010{
8011 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8012 IEMOP_HLP_MIN_386();
8013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8014
8015 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8016 * any way. AMD says it's "unused", whatever that means. We're
8017 * ignoring for now. */
8018 if (IEM_IS_MODRM_REG_MODE(bRm))
8019 {
8020 /* register target */
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022 IEM_MC_BEGIN(0, 0);
8023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8024 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8025 } IEM_MC_ELSE() {
8026 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8027 } IEM_MC_ENDIF();
8028 IEM_MC_ADVANCE_RIP();
8029 IEM_MC_END();
8030 }
8031 else
8032 {
8033 /* memory target */
8034 IEM_MC_BEGIN(0, 1);
8035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8038 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8039 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8040 } IEM_MC_ELSE() {
8041 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8042 } IEM_MC_ENDIF();
8043 IEM_MC_ADVANCE_RIP();
8044 IEM_MC_END();
8045 }
8046 return VINF_SUCCESS;
8047}
8048
8049
8050/** Opcode 0x0f 0x94. */
8051FNIEMOP_DEF(iemOp_sete_Eb)
8052{
8053 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8054 IEMOP_HLP_MIN_386();
8055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8056
8057 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8058 * any way. AMD says it's "unused", whatever that means. We're
8059 * ignoring for now. */
8060 if (IEM_IS_MODRM_REG_MODE(bRm))
8061 {
8062 /* register target */
8063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8064 IEM_MC_BEGIN(0, 0);
8065 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8066 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8067 } IEM_MC_ELSE() {
8068 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8069 } IEM_MC_ENDIF();
8070 IEM_MC_ADVANCE_RIP();
8071 IEM_MC_END();
8072 }
8073 else
8074 {
8075 /* memory target */
8076 IEM_MC_BEGIN(0, 1);
8077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8080 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8081 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8082 } IEM_MC_ELSE() {
8083 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8084 } IEM_MC_ENDIF();
8085 IEM_MC_ADVANCE_RIP();
8086 IEM_MC_END();
8087 }
8088 return VINF_SUCCESS;
8089}
8090
8091
8092/** Opcode 0x0f 0x95. */
8093FNIEMOP_DEF(iemOp_setne_Eb)
8094{
8095 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8096 IEMOP_HLP_MIN_386();
8097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8098
8099 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8100 * any way. AMD says it's "unused", whatever that means. We're
8101 * ignoring for now. */
8102 if (IEM_IS_MODRM_REG_MODE(bRm))
8103 {
8104 /* register target */
8105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8106 IEM_MC_BEGIN(0, 0);
8107 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8108 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8109 } IEM_MC_ELSE() {
8110 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8111 } IEM_MC_ENDIF();
8112 IEM_MC_ADVANCE_RIP();
8113 IEM_MC_END();
8114 }
8115 else
8116 {
8117 /* memory target */
8118 IEM_MC_BEGIN(0, 1);
8119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8124 } IEM_MC_ELSE() {
8125 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8126 } IEM_MC_ENDIF();
8127 IEM_MC_ADVANCE_RIP();
8128 IEM_MC_END();
8129 }
8130 return VINF_SUCCESS;
8131}
8132
8133
8134/** Opcode 0x0f 0x96. */
8135FNIEMOP_DEF(iemOp_setbe_Eb)
8136{
8137 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8138 IEMOP_HLP_MIN_386();
8139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8140
8141 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8142 * any way. AMD says it's "unused", whatever that means. We're
8143 * ignoring for now. */
8144 if (IEM_IS_MODRM_REG_MODE(bRm))
8145 {
8146 /* register target */
8147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8148 IEM_MC_BEGIN(0, 0);
8149 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8150 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8151 } IEM_MC_ELSE() {
8152 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8153 } IEM_MC_ENDIF();
8154 IEM_MC_ADVANCE_RIP();
8155 IEM_MC_END();
8156 }
8157 else
8158 {
8159 /* memory target */
8160 IEM_MC_BEGIN(0, 1);
8161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8164 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8166 } IEM_MC_ELSE() {
8167 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8168 } IEM_MC_ENDIF();
8169 IEM_MC_ADVANCE_RIP();
8170 IEM_MC_END();
8171 }
8172 return VINF_SUCCESS;
8173}
8174
8175
8176/** Opcode 0x0f 0x97. */
8177FNIEMOP_DEF(iemOp_setnbe_Eb)
8178{
8179 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8180 IEMOP_HLP_MIN_386();
8181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8182
8183 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8184 * any way. AMD says it's "unused", whatever that means. We're
8185 * ignoring for now. */
8186 if (IEM_IS_MODRM_REG_MODE(bRm))
8187 {
8188 /* register target */
8189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8190 IEM_MC_BEGIN(0, 0);
8191 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8192 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8193 } IEM_MC_ELSE() {
8194 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8195 } IEM_MC_ENDIF();
8196 IEM_MC_ADVANCE_RIP();
8197 IEM_MC_END();
8198 }
8199 else
8200 {
8201 /* memory target */
8202 IEM_MC_BEGIN(0, 1);
8203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8206 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8208 } IEM_MC_ELSE() {
8209 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8210 } IEM_MC_ENDIF();
8211 IEM_MC_ADVANCE_RIP();
8212 IEM_MC_END();
8213 }
8214 return VINF_SUCCESS;
8215}
8216
8217
8218/** Opcode 0x0f 0x98. */
8219FNIEMOP_DEF(iemOp_sets_Eb)
8220{
8221 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8222 IEMOP_HLP_MIN_386();
8223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8224
8225 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8226 * any way. AMD says it's "unused", whatever that means. We're
8227 * ignoring for now. */
8228 if (IEM_IS_MODRM_REG_MODE(bRm))
8229 {
8230 /* register target */
8231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8232 IEM_MC_BEGIN(0, 0);
8233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8234 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8235 } IEM_MC_ELSE() {
8236 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8237 } IEM_MC_ENDIF();
8238 IEM_MC_ADVANCE_RIP();
8239 IEM_MC_END();
8240 }
8241 else
8242 {
8243 /* memory target */
8244 IEM_MC_BEGIN(0, 1);
8245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8250 } IEM_MC_ELSE() {
8251 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8252 } IEM_MC_ENDIF();
8253 IEM_MC_ADVANCE_RIP();
8254 IEM_MC_END();
8255 }
8256 return VINF_SUCCESS;
8257}
8258
8259
8260/** Opcode 0x0f 0x99. */
8261FNIEMOP_DEF(iemOp_setns_Eb)
8262{
8263 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8264 IEMOP_HLP_MIN_386();
8265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8266
8267 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8268 * any way. AMD says it's "unused", whatever that means. We're
8269 * ignoring for now. */
8270 if (IEM_IS_MODRM_REG_MODE(bRm))
8271 {
8272 /* register target */
8273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8274 IEM_MC_BEGIN(0, 0);
8275 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8276 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8277 } IEM_MC_ELSE() {
8278 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8279 } IEM_MC_ENDIF();
8280 IEM_MC_ADVANCE_RIP();
8281 IEM_MC_END();
8282 }
8283 else
8284 {
8285 /* memory target */
8286 IEM_MC_BEGIN(0, 1);
8287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8292 } IEM_MC_ELSE() {
8293 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8294 } IEM_MC_ENDIF();
8295 IEM_MC_ADVANCE_RIP();
8296 IEM_MC_END();
8297 }
8298 return VINF_SUCCESS;
8299}
8300
8301
8302/** Opcode 0x0f 0x9a. */
8303FNIEMOP_DEF(iemOp_setp_Eb)
8304{
8305 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8306 IEMOP_HLP_MIN_386();
8307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8308
8309 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8310 * any way. AMD says it's "unused", whatever that means. We're
8311 * ignoring for now. */
8312 if (IEM_IS_MODRM_REG_MODE(bRm))
8313 {
8314 /* register target */
8315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8316 IEM_MC_BEGIN(0, 0);
8317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8318 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8319 } IEM_MC_ELSE() {
8320 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8321 } IEM_MC_ENDIF();
8322 IEM_MC_ADVANCE_RIP();
8323 IEM_MC_END();
8324 }
8325 else
8326 {
8327 /* memory target */
8328 IEM_MC_BEGIN(0, 1);
8329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8332 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8334 } IEM_MC_ELSE() {
8335 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8336 } IEM_MC_ENDIF();
8337 IEM_MC_ADVANCE_RIP();
8338 IEM_MC_END();
8339 }
8340 return VINF_SUCCESS;
8341}
8342
8343
8344/** Opcode 0x0f 0x9b. */
8345FNIEMOP_DEF(iemOp_setnp_Eb)
8346{
8347 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8348 IEMOP_HLP_MIN_386();
8349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8350
8351 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8352 * any way. AMD says it's "unused", whatever that means. We're
8353 * ignoring for now. */
8354 if (IEM_IS_MODRM_REG_MODE(bRm))
8355 {
8356 /* register target */
8357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8358 IEM_MC_BEGIN(0, 0);
8359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8360 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8361 } IEM_MC_ELSE() {
8362 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8363 } IEM_MC_ENDIF();
8364 IEM_MC_ADVANCE_RIP();
8365 IEM_MC_END();
8366 }
8367 else
8368 {
8369 /* memory target */
8370 IEM_MC_BEGIN(0, 1);
8371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8376 } IEM_MC_ELSE() {
8377 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8378 } IEM_MC_ENDIF();
8379 IEM_MC_ADVANCE_RIP();
8380 IEM_MC_END();
8381 }
8382 return VINF_SUCCESS;
8383}
8384
8385
8386/** Opcode 0x0f 0x9c. */
8387FNIEMOP_DEF(iemOp_setl_Eb)
8388{
8389 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8390 IEMOP_HLP_MIN_386();
8391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8392
8393 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8394 * any way. AMD says it's "unused", whatever that means. We're
8395 * ignoring for now. */
8396 if (IEM_IS_MODRM_REG_MODE(bRm))
8397 {
8398 /* register target */
8399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8400 IEM_MC_BEGIN(0, 0);
8401 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8402 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8403 } IEM_MC_ELSE() {
8404 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8405 } IEM_MC_ENDIF();
8406 IEM_MC_ADVANCE_RIP();
8407 IEM_MC_END();
8408 }
8409 else
8410 {
8411 /* memory target */
8412 IEM_MC_BEGIN(0, 1);
8413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8418 } IEM_MC_ELSE() {
8419 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8420 } IEM_MC_ENDIF();
8421 IEM_MC_ADVANCE_RIP();
8422 IEM_MC_END();
8423 }
8424 return VINF_SUCCESS;
8425}
8426
8427
8428/** Opcode 0x0f 0x9d. */
8429FNIEMOP_DEF(iemOp_setnl_Eb)
8430{
8431 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8432 IEMOP_HLP_MIN_386();
8433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8434
8435 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8436 * any way. AMD says it's "unused", whatever that means. We're
8437 * ignoring for now. */
8438 if (IEM_IS_MODRM_REG_MODE(bRm))
8439 {
8440 /* register target */
8441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8442 IEM_MC_BEGIN(0, 0);
8443 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8444 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8445 } IEM_MC_ELSE() {
8446 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8447 } IEM_MC_ENDIF();
8448 IEM_MC_ADVANCE_RIP();
8449 IEM_MC_END();
8450 }
8451 else
8452 {
8453 /* memory target */
8454 IEM_MC_BEGIN(0, 1);
8455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8458 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8460 } IEM_MC_ELSE() {
8461 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8462 } IEM_MC_ENDIF();
8463 IEM_MC_ADVANCE_RIP();
8464 IEM_MC_END();
8465 }
8466 return VINF_SUCCESS;
8467}
8468
8469
8470/** Opcode 0x0f 0x9e. */
8471FNIEMOP_DEF(iemOp_setle_Eb)
8472{
8473 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8474 IEMOP_HLP_MIN_386();
8475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8476
8477 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8478 * any way. AMD says it's "unused", whatever that means. We're
8479 * ignoring for now. */
8480 if (IEM_IS_MODRM_REG_MODE(bRm))
8481 {
8482 /* register target */
8483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8484 IEM_MC_BEGIN(0, 0);
8485 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8486 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8487 } IEM_MC_ELSE() {
8488 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8489 } IEM_MC_ENDIF();
8490 IEM_MC_ADVANCE_RIP();
8491 IEM_MC_END();
8492 }
8493 else
8494 {
8495 /* memory target */
8496 IEM_MC_BEGIN(0, 1);
8497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8502 } IEM_MC_ELSE() {
8503 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8504 } IEM_MC_ENDIF();
8505 IEM_MC_ADVANCE_RIP();
8506 IEM_MC_END();
8507 }
8508 return VINF_SUCCESS;
8509}
8510
8511
8512/** Opcode 0x0f 0x9f. */
8513FNIEMOP_DEF(iemOp_setnle_Eb)
8514{
8515 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8516 IEMOP_HLP_MIN_386();
8517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8518
8519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8520 * any way. AMD says it's "unused", whatever that means. We're
8521 * ignoring for now. */
8522 if (IEM_IS_MODRM_REG_MODE(bRm))
8523 {
8524 /* register target */
8525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8526 IEM_MC_BEGIN(0, 0);
8527 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP();
8533 IEM_MC_END();
8534 }
8535 else
8536 {
8537 /* memory target */
8538 IEM_MC_BEGIN(0, 1);
8539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8544 } IEM_MC_ELSE() {
8545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8546 } IEM_MC_ENDIF();
8547 IEM_MC_ADVANCE_RIP();
8548 IEM_MC_END();
8549 }
8550 return VINF_SUCCESS;
8551}
8552
8553
8554/**
8555 * Common 'push segment-register' helper.
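 *
 * Note that the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than a plain
 * 32-bit push: recent CPUs only write the low 16 bits of the stack slot
 * when pushing a segment register with a 32-bit operand size, and that
 * helper presumably exists to model this quirk.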
8556 */
8557FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8558{
8559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8560 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS are pushable in 64-bit mode. */
8561 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8562
8563 switch (pVCpu->iem.s.enmEffOpSize)
8564 {
8565 case IEMMODE_16BIT:
8566 IEM_MC_BEGIN(0, 1);
8567 IEM_MC_LOCAL(uint16_t, u16Value);
8568 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8569 IEM_MC_PUSH_U16(u16Value);
8570 IEM_MC_ADVANCE_RIP();
8571 IEM_MC_END();
8572 break;
8573
8574 case IEMMODE_32BIT:
8575 IEM_MC_BEGIN(0, 1);
8576 IEM_MC_LOCAL(uint32_t, u32Value);
8577 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
8578 IEM_MC_PUSH_U32_SREG(u32Value);
8579 IEM_MC_ADVANCE_RIP();
8580 IEM_MC_END();
8581 break;
8582
8583 case IEMMODE_64BIT:
8584 IEM_MC_BEGIN(0, 1);
8585 IEM_MC_LOCAL(uint64_t, u64Value);
8586 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8587 IEM_MC_PUSH_U64(u64Value);
8588 IEM_MC_ADVANCE_RIP();
8589 IEM_MC_END();
8590 break;
8591 }
8592
8593 return VINF_SUCCESS;
8594}
8595
8596
8597/** Opcode 0x0f 0xa0. */
8598FNIEMOP_DEF(iemOp_push_fs)
8599{
8600 IEMOP_MNEMONIC(push_fs, "push fs");
8601 IEMOP_HLP_MIN_386();
8602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8603 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8604}
8605
8606
8607/** Opcode 0x0f 0xa1. */
8608FNIEMOP_DEF(iemOp_pop_fs)
8609{
8610 IEMOP_MNEMONIC(pop_fs, "pop fs");
8611 IEMOP_HLP_MIN_386();
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8614}
8615
8616
8617/** Opcode 0x0f 0xa2. */
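/* CPUID only turned up in later 486 models; software probes for it by
   checking whether the EFLAGS.ID bit can be toggled, hence only a MIN_486
   decode check here. */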
8618FNIEMOP_DEF(iemOp_cpuid)
8619{
8620 IEMOP_MNEMONIC(cpuid, "cpuid");
8621 IEMOP_HLP_MIN_486(); /* not all 486es. */
8622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8623 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8624}
8625
8626
8627/**
8628 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8629 * iemOp_bts_Ev_Gv.
8630 */
8631FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
8632{
8633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8634 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8635
8636 if (IEM_IS_MODRM_REG_MODE(bRm))
8637 {
8638 /* register destination. */
8639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8640 switch (pVCpu->iem.s.enmEffOpSize)
8641 {
8642 case IEMMODE_16BIT:
8643 IEM_MC_BEGIN(3, 0);
8644 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8645 IEM_MC_ARG(uint16_t, u16Src, 1);
8646 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8647
8648 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8649 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
8650 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8651 IEM_MC_REF_EFLAGS(pEFlags);
8652 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8653
8654 IEM_MC_ADVANCE_RIP();
8655 IEM_MC_END();
8656 return VINF_SUCCESS;
8657
8658 case IEMMODE_32BIT:
8659 IEM_MC_BEGIN(3, 0);
8660 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8661 IEM_MC_ARG(uint32_t, u32Src, 1);
8662 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8663
8664 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8665 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
8666 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8667 IEM_MC_REF_EFLAGS(pEFlags);
8668 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8669
8670 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8671 IEM_MC_ADVANCE_RIP();
8672 IEM_MC_END();
8673 return VINF_SUCCESS;
8674
8675 case IEMMODE_64BIT:
8676 IEM_MC_BEGIN(3, 0);
8677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8678 IEM_MC_ARG(uint64_t, u64Src, 1);
8679 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8680
8681 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8682 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
8683 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8684 IEM_MC_REF_EFLAGS(pEFlags);
8685 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8686
8687 IEM_MC_ADVANCE_RIP();
8688 IEM_MC_END();
8689 return VINF_SUCCESS;
8690
8691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8692 }
8693 }
8694 else
8695 {
8696 /* memory destination. */
8697
8698 uint32_t fAccess;
8699 if (pImpl->pfnLockedU16)
8700 fAccess = IEM_ACCESS_DATA_RW;
8701 else /* BT */
8702 fAccess = IEM_ACCESS_DATA_R;
8703
8704 /** @todo test negative bit offsets! */
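/* For memory destinations the bit offset is not simply masked to the
   operand width: the ASSIGN/SAR/SHL sequence below adjusts the effective
   address by (bitoffset >> log2(opbits)) * opbytes, using an arithmetic
   shift so that negative offsets address bits below the base, while the
   AND keeps only the bit index within the selected word. */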
8705 switch (pVCpu->iem.s.enmEffOpSize)
8706 {
8707 case IEMMODE_16BIT:
8708 IEM_MC_BEGIN(3, 2);
8709 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8710 IEM_MC_ARG(uint16_t, u16Src, 1);
8711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8713 IEM_MC_LOCAL(int16_t, i16AddrAdj);
8714
8715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8716 if (pImpl->pfnLockedU16)
8717 IEMOP_HLP_DONE_DECODING();
8718 else
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8721 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
8722 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
8723 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
8724 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
8725 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
8726 IEM_MC_FETCH_EFLAGS(EFlags);
8727
8728 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8729 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8731 else
8732 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8733 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8734
8735 IEM_MC_COMMIT_EFLAGS(EFlags);
8736 IEM_MC_ADVANCE_RIP();
8737 IEM_MC_END();
8738 return VINF_SUCCESS;
8739
8740 case IEMMODE_32BIT:
8741 IEM_MC_BEGIN(3, 2);
8742 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8743 IEM_MC_ARG(uint32_t, u32Src, 1);
8744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8746 IEM_MC_LOCAL(int32_t, i32AddrAdj);
8747
8748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8749 if (pImpl->pfnLockedU16)
8750 IEMOP_HLP_DONE_DECODING();
8751 else
8752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8753 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8754 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
8755 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
8756 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
8757 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
8758 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
8759 IEM_MC_FETCH_EFLAGS(EFlags);
8760
8761 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8762 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8764 else
8765 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8766 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8767
8768 IEM_MC_COMMIT_EFLAGS(EFlags);
8769 IEM_MC_ADVANCE_RIP();
8770 IEM_MC_END();
8771 return VINF_SUCCESS;
8772
8773 case IEMMODE_64BIT:
8774 IEM_MC_BEGIN(3, 2);
8775 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8776 IEM_MC_ARG(uint64_t, u64Src, 1);
8777 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8779 IEM_MC_LOCAL(int64_t, i64AddrAdj);
8780
8781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8782 if (pImpl->pfnLockedU16)
8783 IEMOP_HLP_DONE_DECODING();
8784 else
8785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8786 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8787 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
8788 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
8789 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
8790 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
8791 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
8792 IEM_MC_FETCH_EFLAGS(EFlags);
8793
8794 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8795 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8797 else
8798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8799 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8800
8801 IEM_MC_COMMIT_EFLAGS(EFlags);
8802 IEM_MC_ADVANCE_RIP();
8803 IEM_MC_END();
8804 return VINF_SUCCESS;
8805
8806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8807 }
8808 }
8809}
8810
8811
8812/** Opcode 0x0f 0xa3. */
8813FNIEMOP_DEF(iemOp_bt_Ev_Gv)
8814{
8815 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
8816 IEMOP_HLP_MIN_386();
8817 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
8818}
8819
8820
8821/**
8822 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
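 *
 * SHLD/SHRD shift the destination left/right while filling the vacated
 * bits from the source register; the shift count comes from an immediate
 * byte, and its masking (mod 32, or mod 64 with REX.W) is presumably left
 * to the pfnNormalUxx workers since none is done here.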
8823 */
8824FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
8825{
8826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8827 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8828
8829 if (IEM_IS_MODRM_REG_MODE(bRm))
8830 {
8831 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8833
8834 switch (pVCpu->iem.s.enmEffOpSize)
8835 {
8836 case IEMMODE_16BIT:
8837 IEM_MC_BEGIN(4, 0);
8838 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8839 IEM_MC_ARG(uint16_t, u16Src, 1);
8840 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8841 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8842
8843 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8844 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8845 IEM_MC_REF_EFLAGS(pEFlags);
8846 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8847
8848 IEM_MC_ADVANCE_RIP();
8849 IEM_MC_END();
8850 return VINF_SUCCESS;
8851
8852 case IEMMODE_32BIT:
8853 IEM_MC_BEGIN(4, 0);
8854 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8855 IEM_MC_ARG(uint32_t, u32Src, 1);
8856 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8857 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8858
8859 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8860 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8861 IEM_MC_REF_EFLAGS(pEFlags);
8862 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8863
8864 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8865 IEM_MC_ADVANCE_RIP();
8866 IEM_MC_END();
8867 return VINF_SUCCESS;
8868
8869 case IEMMODE_64BIT:
8870 IEM_MC_BEGIN(4, 0);
8871 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8872 IEM_MC_ARG(uint64_t, u64Src, 1);
8873 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8874 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8875
8876 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8877 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8878 IEM_MC_REF_EFLAGS(pEFlags);
8879 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8880
8881 IEM_MC_ADVANCE_RIP();
8882 IEM_MC_END();
8883 return VINF_SUCCESS;
8884
8885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8886 }
8887 }
8888 else
8889 {
8890 switch (pVCpu->iem.s.enmEffOpSize)
8891 {
8892 case IEMMODE_16BIT:
8893 IEM_MC_BEGIN(4, 2);
8894 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8895 IEM_MC_ARG(uint16_t, u16Src, 1);
8896 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8897 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8899
8900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8901 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8902 IEM_MC_ASSIGN(cShiftArg, cShift);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8905 IEM_MC_FETCH_EFLAGS(EFlags);
8906 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8907 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8908
8909 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8910 IEM_MC_COMMIT_EFLAGS(EFlags);
8911 IEM_MC_ADVANCE_RIP();
8912 IEM_MC_END();
8913 return VINF_SUCCESS;
8914
8915 case IEMMODE_32BIT:
8916 IEM_MC_BEGIN(4, 2);
8917 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8918 IEM_MC_ARG(uint32_t, u32Src, 1);
8919 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8922
8923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8924 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8925 IEM_MC_ASSIGN(cShiftArg, cShift);
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8928 IEM_MC_FETCH_EFLAGS(EFlags);
8929 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8930 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8931
8932 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8933 IEM_MC_COMMIT_EFLAGS(EFlags);
8934 IEM_MC_ADVANCE_RIP();
8935 IEM_MC_END();
8936 return VINF_SUCCESS;
8937
8938 case IEMMODE_64BIT:
8939 IEM_MC_BEGIN(4, 2);
8940 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8941 IEM_MC_ARG(uint64_t, u64Src, 1);
8942 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8945
8946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8947 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8948 IEM_MC_ASSIGN(cShiftArg, cShift);
8949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8950 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8951 IEM_MC_FETCH_EFLAGS(EFlags);
8952 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8953 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8954
8955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8956 IEM_MC_COMMIT_EFLAGS(EFlags);
8957 IEM_MC_ADVANCE_RIP();
8958 IEM_MC_END();
8959 return VINF_SUCCESS;
8960
8961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8962 }
8963 }
8964}
8965
8966
8967/**
8968 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
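 *
 * Identical to the Ib variant above, except the shift count is fetched
 * from CL at execution time instead of being decoded as an immediate.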
8969 */
8970FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
8971{
8972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8973 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8974
8975 if (IEM_IS_MODRM_REG_MODE(bRm))
8976 {
8977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8978
8979 switch (pVCpu->iem.s.enmEffOpSize)
8980 {
8981 case IEMMODE_16BIT:
8982 IEM_MC_BEGIN(4, 0);
8983 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8984 IEM_MC_ARG(uint16_t, u16Src, 1);
8985 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8986 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8987
8988 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8989 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8990 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8991 IEM_MC_REF_EFLAGS(pEFlags);
8992 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8993
8994 IEM_MC_ADVANCE_RIP();
8995 IEM_MC_END();
8996 return VINF_SUCCESS;
8997
8998 case IEMMODE_32BIT:
8999 IEM_MC_BEGIN(4, 0);
9000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9001 IEM_MC_ARG(uint32_t, u32Src, 1);
9002 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9003 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9004
9005 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9006 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9007 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9008 IEM_MC_REF_EFLAGS(pEFlags);
9009 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9010
9011 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9012 IEM_MC_ADVANCE_RIP();
9013 IEM_MC_END();
9014 return VINF_SUCCESS;
9015
9016 case IEMMODE_64BIT:
9017 IEM_MC_BEGIN(4, 0);
9018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9019 IEM_MC_ARG(uint64_t, u64Src, 1);
9020 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9021 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9022
9023 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9024 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9025 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9026 IEM_MC_REF_EFLAGS(pEFlags);
9027 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9028
9029 IEM_MC_ADVANCE_RIP();
9030 IEM_MC_END();
9031 return VINF_SUCCESS;
9032
9033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9034 }
9035 }
9036 else
9037 {
9038 switch (pVCpu->iem.s.enmEffOpSize)
9039 {
9040 case IEMMODE_16BIT:
9041 IEM_MC_BEGIN(4, 2);
9042 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9043 IEM_MC_ARG(uint16_t, u16Src, 1);
9044 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9045 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9047
9048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9050 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9051 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9052 IEM_MC_FETCH_EFLAGS(EFlags);
9053 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9054 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9055
9056 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9057 IEM_MC_COMMIT_EFLAGS(EFlags);
9058 IEM_MC_ADVANCE_RIP();
9059 IEM_MC_END();
9060 return VINF_SUCCESS;
9061
9062 case IEMMODE_32BIT:
9063 IEM_MC_BEGIN(4, 2);
9064 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9065 IEM_MC_ARG(uint32_t, u32Src, 1);
9066 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9067 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9069
9070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9072 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9073 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9074 IEM_MC_FETCH_EFLAGS(EFlags);
9075 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9076 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9077
9078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9079 IEM_MC_COMMIT_EFLAGS(EFlags);
9080 IEM_MC_ADVANCE_RIP();
9081 IEM_MC_END();
9082 return VINF_SUCCESS;
9083
9084 case IEMMODE_64BIT:
9085 IEM_MC_BEGIN(4, 2);
9086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9087 IEM_MC_ARG(uint64_t, u64Src, 1);
9088 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9091
9092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9094 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9095 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9096 IEM_MC_FETCH_EFLAGS(EFlags);
9097 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9098 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9099
9100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9101 IEM_MC_COMMIT_EFLAGS(EFlags);
9102 IEM_MC_ADVANCE_RIP();
9103 IEM_MC_END();
9104 return VINF_SUCCESS;
9105
9106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9107 }
9108 }
9109}
9110
9111
9112
9113/** Opcode 0x0f 0xa4. */
9114FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9115{
9116 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9117 IEMOP_HLP_MIN_386();
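/* SHLD shifts the destination left and fills the vacated low bits from the
 * top of the source; for shift counts 0 < cnt < width the result is
 * dst = (dst << cnt) | (src >> (width - cnt)). */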
9118 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9119}
9120
9121
9122/** Opcode 0x0f 0xa5. */
9123FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9124{
9125 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9126 IEMOP_HLP_MIN_386();
9127 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9128}
9129
9130
9131/** Opcode 0x0f 0xa8. */
9132FNIEMOP_DEF(iemOp_push_gs)
9133{
9134 IEMOP_MNEMONIC(push_gs, "push gs");
9135 IEMOP_HLP_MIN_386();
9136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9137 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9138}
9139
9140
9141/** Opcode 0x0f 0xa9. */
9142FNIEMOP_DEF(iemOp_pop_gs)
9143{
9144 IEMOP_MNEMONIC(pop_gs, "pop gs");
9145 IEMOP_HLP_MIN_386();
9146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9147 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9148}
9149
9150
9151/** Opcode 0x0f 0xaa. */
9152FNIEMOP_DEF(iemOp_rsm)
9153{
9154 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9155 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9157 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9158}
9159
9160
9161
9162/** Opcode 0x0f 0xab. */
9163FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9164{
9165 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9166 IEMOP_HLP_MIN_386();
9167 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9168}
9169
9170
9171/** Opcode 0x0f 0xac. */
9172FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9173{
9174 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9175 IEMOP_HLP_MIN_386();
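/* SHRD shifts the destination right and fills the vacated high bits from the
 * bottom of the source; for shift counts 0 < cnt < width the result is
 * dst = (dst >> cnt) | (src << (width - cnt)). */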
9176 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9177}
9178
9179
9180/** Opcode 0x0f 0xad. */
9181FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9182{
9183 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9184 IEMOP_HLP_MIN_386();
9185 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9186}
9187
9188
9189/** Opcode 0x0f 0xae mem/0. */
9190FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9191{
9192 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9193 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9194 return IEMOP_RAISE_INVALID_OPCODE();
9195
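/* FXSAVE writes a 512 byte area that must be 16 byte aligned; the alignment
 * check and the actual state saving are left to the C implementation. */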
9196 IEM_MC_BEGIN(3, 1);
9197 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9198 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9199 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9202 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9203 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9204 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9205 IEM_MC_END();
9206 return VINF_SUCCESS;
9207}
9208
9209
9210/** Opcode 0x0f 0xae mem/1. */
9211FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9212{
9213 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9214 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9215 return IEMOP_RAISE_INVALID_OPCODE();
9216
9217 IEM_MC_BEGIN(3, 1);
9218 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9219 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9220 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9223 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9224 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9225 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9226 IEM_MC_END();
9227 return VINF_SUCCESS;
9228}
9229
9230
9231/**
9232 * @opmaps grp15
9233 * @opcode !11/2
9234 * @oppfx none
9235 * @opcpuid sse
9236 * @opgroup og_sse_mxcsrsm
9237 * @opxcpttype 5
9238 * @optest op1=0 -> mxcsr=0
9239 * @optest op1=0x2083 -> mxcsr=0x2083
9240 * @optest op1=0xfffffffe -> value.xcpt=0xd
9241 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9242 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9243 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9244 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9245 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9246 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9247 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9248 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9249 */
9250FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9251{
9252 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9253 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9254 return IEMOP_RAISE_INVALID_OPCODE();
9255
9256 IEM_MC_BEGIN(2, 0);
9257 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9258 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9261 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9262 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9263 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9264 IEM_MC_END();
9265 return VINF_SUCCESS;
9266}
9267
9268
9269/**
9270 * @opmaps grp15
9271 * @opcode !11/3
9272 * @oppfx none
9273 * @opcpuid sse
9274 * @opgroup og_sse_mxcsrsm
9275 * @opxcpttype 5
9276 * @optest mxcsr=0 -> op1=0
9277 * @optest mxcsr=0x2083 -> op1=0x2083
9278 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9279 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9280 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9281 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9282 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9283 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9284 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9285 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9286 */
9287FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9288{
9289 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9290 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9291 return IEMOP_RAISE_INVALID_OPCODE();
9292
9293 IEM_MC_BEGIN(2, 0);
9294 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9295 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9299 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9300 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9301 IEM_MC_END();
9302 return VINF_SUCCESS;
9303}
9304
9305
9306/**
9307 * @opmaps grp15
9308 * @opcode !11/4
9309 * @oppfx none
9310 * @opcpuid xsave
9311 * @opgroup og_system
9312 * @opxcpttype none
9313 */
9314FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9315{
9316 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9317 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9318 return IEMOP_RAISE_INVALID_OPCODE();
9319
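/* XSAVE stores the state components selected by EDX:EAX & XCR0; the
 * requested-feature masking and the saving itself live in iemCImpl_xsave. */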
9320 IEM_MC_BEGIN(3, 0);
9321 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9322 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9323 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9326 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9327 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9328 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9329 IEM_MC_END();
9330 return VINF_SUCCESS;
9331}
9332
9333
9334/**
9335 * @opmaps grp15
9336 * @opcode !11/5
9337 * @oppfx none
9338 * @opcpuid xsave
9339 * @opgroup og_system
9340 * @opxcpttype none
9341 */
9342FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9343{
9344 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9345 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9346 return IEMOP_RAISE_INVALID_OPCODE();
9347
9348 IEM_MC_BEGIN(3, 0);
9349 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9350 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9351 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9354 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9355 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9356 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9357 IEM_MC_END();
9358 return VINF_SUCCESS;
9359}
9360
9361/** Opcode 0x0f 0xae mem/6. */
9362FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9363
9364/**
9365 * @opmaps grp15
9366 * @opcode !11/7
9367 * @oppfx none
9368 * @opcpuid clfsh
9369 * @opgroup og_cachectl
9370 * @optest op1=1 ->
9371 */
9372FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9373{
9374 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9375 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9376 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9377
9378 IEM_MC_BEGIN(2, 0);
9379 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9380 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9384 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9385 IEM_MC_END();
9386 return VINF_SUCCESS;
9387}
9388
9389/**
9390 * @opmaps grp15
9391 * @opcode !11/7
9392 * @oppfx 0x66
9393 * @opcpuid clflushopt
9394 * @opgroup og_cachectl
9395 * @optest op1=1 ->
9396 */
9397FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9398{
9399 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9400 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9401 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9402
9403 IEM_MC_BEGIN(2, 0);
9404 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9405 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9408 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9409 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9410 IEM_MC_END();
9411 return VINF_SUCCESS;
9412}
9413
9414
9415/** Opcode 0x0f 0xae 11b/5. */
9416FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9417{
9418 RT_NOREF_PV(bRm);
9419 IEMOP_MNEMONIC(lfence, "lfence");
9420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9421 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9422 return IEMOP_RAISE_INVALID_OPCODE();
9423
9424 IEM_MC_BEGIN(0, 0);
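/* On x86/AMD64 hosts the native LFENCE helper can only be used when the host
 * itself has SSE2, so we fall back to a generic memory fence otherwise; on
 * ARM64 hosts iemAImpl_lfence is always usable. */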
9425#ifndef RT_ARCH_ARM64
9426 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9427#endif
9428 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9429#ifndef RT_ARCH_ARM64
9430 else
9431 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9432#endif
9433 IEM_MC_ADVANCE_RIP();
9434 IEM_MC_END();
9435 return VINF_SUCCESS;
9436}
9437
9438
9439/** Opcode 0x0f 0xae 11b/6. */
9440FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9441{
9442 RT_NOREF_PV(bRm);
9443 IEMOP_MNEMONIC(mfence, "mfence");
9444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9445 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9446 return IEMOP_RAISE_INVALID_OPCODE();
9447
9448 IEM_MC_BEGIN(0, 0);
9449#ifndef RT_ARCH_ARM64
9450 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9451#endif
9452 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9453#ifndef RT_ARCH_ARM64
9454 else
9455 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9456#endif
9457 IEM_MC_ADVANCE_RIP();
9458 IEM_MC_END();
9459 return VINF_SUCCESS;
9460}
9461
9462
9463/** Opcode 0x0f 0xae 11b/7. */
9464FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9465{
9466 RT_NOREF_PV(bRm);
9467 IEMOP_MNEMONIC(sfence, "sfence");
9468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9469 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9470 return IEMOP_RAISE_INVALID_OPCODE();
9471
9472 IEM_MC_BEGIN(0, 0);
9473#ifndef RT_ARCH_ARM64
9474 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9475#endif
9476 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9477#ifndef RT_ARCH_ARM64
9478 else
9479 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9480#endif
9481 IEM_MC_ADVANCE_RIP();
9482 IEM_MC_END();
9483 return VINF_SUCCESS;
9484}
9485
9486
9487/** Opcode 0xf3 0x0f 0xae 11b/0. */
9488FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9489{
9490 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9492 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9493 {
9494 IEM_MC_BEGIN(1, 0);
9495 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9496 IEM_MC_ARG(uint64_t, u64Dst, 0);
9497 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9498 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9499 IEM_MC_ADVANCE_RIP();
9500 IEM_MC_END();
9501 }
9502 else
9503 {
9504 IEM_MC_BEGIN(1, 0);
9505 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9506 IEM_MC_ARG(uint32_t, u32Dst, 0);
9507 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9509 IEM_MC_ADVANCE_RIP();
9510 IEM_MC_END();
9511 }
9512 return VINF_SUCCESS;
9513}
9514
9515
9516/** Opcode 0xf3 0x0f 0xae 11b/1. */
9517FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9518{
9519 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9521 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9522 {
9523 IEM_MC_BEGIN(1, 0);
9524 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9525 IEM_MC_ARG(uint64_t, u64Dst, 0);
9526 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9527 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9528 IEM_MC_ADVANCE_RIP();
9529 IEM_MC_END();
9530 }
9531 else
9532 {
9533 IEM_MC_BEGIN(1, 0);
9534 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9535 IEM_MC_ARG(uint32_t, u32Dst, 0);
9536 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9537 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9538 IEM_MC_ADVANCE_RIP();
9539 IEM_MC_END();
9540 }
9541 return VINF_SUCCESS;
9542}
9543
9544
9545/** Opcode 0xf3 0x0f 0xae 11b/2. */
9546FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9547{
9548 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
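/* With a 64-bit operand a non-canonical base value raises #GP(0) (see the
 * check below); the 32-bit form zero-extends and cannot fault on the value. */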
9550 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9551 {
9552 IEM_MC_BEGIN(1, 0);
9553 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9554 IEM_MC_ARG(uint64_t, u64Dst, 0);
9555 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9556 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9557 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9558 IEM_MC_ADVANCE_RIP();
9559 IEM_MC_END();
9560 }
9561 else
9562 {
9563 IEM_MC_BEGIN(1, 0);
9564 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9565 IEM_MC_ARG(uint32_t, u32Dst, 0);
9566 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9567 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9568 IEM_MC_ADVANCE_RIP();
9569 IEM_MC_END();
9570 }
9571 return VINF_SUCCESS;
9572}
9573
9574
9575/** Opcode 0xf3 0x0f 0xae 11b/3. */
9576FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9577{
9578 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9580 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9581 {
9582 IEM_MC_BEGIN(1, 0);
9583 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9584 IEM_MC_ARG(uint64_t, u64Dst, 0);
9585 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9586 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9587 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9588 IEM_MC_ADVANCE_RIP();
9589 IEM_MC_END();
9590 }
9591 else
9592 {
9593 IEM_MC_BEGIN(1, 0);
9594 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9595 IEM_MC_ARG(uint32_t, u32Dst, 0);
9596 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9597 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9598 IEM_MC_ADVANCE_RIP();
9599 IEM_MC_END();
9600 }
9601 return VINF_SUCCESS;
9602}
9603
9604
9605/**
9606 * Group 15 jump table for register variant.
9607 */
9608IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9609{ /* pfx: none, 066h, 0f3h, 0f2h */
9610 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9611 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9612 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9613 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9614 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9615 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9616 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9617 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9618};
9619AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9620
9621
9622/**
9623 * Group 15 jump table for memory variant.
9624 */
9625IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9626{ /* pfx: none, 066h, 0f3h, 0f2h */
9627 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9628 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9629 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9630 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9631 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9632 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9633 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9634 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9635};
9636AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9637
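/* Both tables are indexed by modrm.reg * 4 + prefix index, the prefix index
 * being none=0, 066h=1, 0f3h=2, 0f2h=3 (pVCpu->iem.s.idxPrefix).  Thus, for
 * example, F3 0F AE /0 with a register operand lands on iemOp_Grp15_rdfsbase. */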
9638
9639/** Opcode 0x0f 0xae. */
9640FNIEMOP_DEF(iemOp_Grp15)
9641{
9642 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
9643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9644 if (IEM_IS_MODRM_REG_MODE(bRm))
9645 /* register, register */
9646 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9647 + pVCpu->iem.s.idxPrefix], bRm);
9648 /* memory, register */
9649 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9650 + pVCpu->iem.s.idxPrefix], bRm);
9651}
9652
9653
9654/** Opcode 0x0f 0xaf. */
9655FNIEMOP_DEF(iemOp_imul_Gv_Ev)
9656{
9657 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
9658 IEMOP_HLP_MIN_386();
9659 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
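/* Two-operand form: Gv = Gv * Ev (signed, truncated).  CF and OF are set
 * when the full product does not fit the destination; the flags listed
 * above are undefined. */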
9660 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
9661}
9662
9663
9664/** Opcode 0x0f 0xb0. */
9665FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
9666{
9667 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
9668 IEMOP_HLP_MIN_486();
9669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9670
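/* CMPXCHG: if AL == [dst], ZF is set and [dst] is replaced by the source;
 * otherwise ZF is cleared and AL is loaded from [dst].  The destination is
 * written in either case. */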
9671 if (IEM_IS_MODRM_REG_MODE(bRm))
9672 {
9673 IEMOP_HLP_DONE_DECODING();
9674 IEM_MC_BEGIN(4, 0);
9675 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9676 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9677 IEM_MC_ARG(uint8_t, u8Src, 2);
9678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9679
9680 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9681 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9682 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
9683 IEM_MC_REF_EFLAGS(pEFlags);
9684 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9685 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9686 else
9687 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9688
9689 IEM_MC_ADVANCE_RIP();
9690 IEM_MC_END();
9691 }
9692 else
9693 {
9694 IEM_MC_BEGIN(4, 3);
9695 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9696 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9697 IEM_MC_ARG(uint8_t, u8Src, 2);
9698 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9700 IEM_MC_LOCAL(uint8_t, u8Al);
9701
9702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9703 IEMOP_HLP_DONE_DECODING();
9704 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9705 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9706 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
9707 IEM_MC_FETCH_EFLAGS(EFlags);
9708 IEM_MC_REF_LOCAL(pu8Al, u8Al);
9709 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9710 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9711 else
9712 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9713
9714 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9715 IEM_MC_COMMIT_EFLAGS(EFlags);
9716 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
9717 IEM_MC_ADVANCE_RIP();
9718 IEM_MC_END();
9719 }
9720 return VINF_SUCCESS;
9721}
9722
9723/** Opcode 0x0f 0xb1. */
9724FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
9725{
9726 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
9727 IEMOP_HLP_MIN_486();
9728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9729
9730 if (IEM_IS_MODRM_REG_MODE(bRm))
9731 {
9732 IEMOP_HLP_DONE_DECODING();
9733 switch (pVCpu->iem.s.enmEffOpSize)
9734 {
9735 case IEMMODE_16BIT:
9736 IEM_MC_BEGIN(4, 0);
9737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9738 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9739 IEM_MC_ARG(uint16_t, u16Src, 2);
9740 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9741
9742 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9743 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9744 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
9745 IEM_MC_REF_EFLAGS(pEFlags);
9746 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9747 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9748 else
9749 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9750
9751 IEM_MC_ADVANCE_RIP();
9752 IEM_MC_END();
9753 return VINF_SUCCESS;
9754
9755 case IEMMODE_32BIT:
9756 IEM_MC_BEGIN(4, 0);
9757 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9758 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9759 IEM_MC_ARG(uint32_t, u32Src, 2);
9760 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9761
9762 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9763 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9764 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
9765 IEM_MC_REF_EFLAGS(pEFlags);
9766 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9767 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9768 else
9769 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9770
9771 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
9772 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9773 IEM_MC_ADVANCE_RIP();
9774 IEM_MC_END();
9775 return VINF_SUCCESS;
9776
9777 case IEMMODE_64BIT:
9778 IEM_MC_BEGIN(4, 0);
9779 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9780 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9781#ifdef RT_ARCH_X86
9782 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9783#else
9784 IEM_MC_ARG(uint64_t, u64Src, 2);
9785#endif
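/* Note: on 32-bit (x86) hosts the 64-bit source operand is passed to the
 * assembly helper by reference instead of by value. */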
9786 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9787
9788 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9789 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
9790 IEM_MC_REF_EFLAGS(pEFlags);
9791#ifdef RT_ARCH_X86
9792 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9793 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9795 else
9796 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9797#else
9798 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9799 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9800 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9801 else
9802 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9803#endif
9804
9805 IEM_MC_ADVANCE_RIP();
9806 IEM_MC_END();
9807 return VINF_SUCCESS;
9808
9809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9810 }
9811 }
9812 else
9813 {
9814 switch (pVCpu->iem.s.enmEffOpSize)
9815 {
9816 case IEMMODE_16BIT:
9817 IEM_MC_BEGIN(4, 3);
9818 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9819 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9820 IEM_MC_ARG(uint16_t, u16Src, 2);
9821 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9823 IEM_MC_LOCAL(uint16_t, u16Ax);
9824
9825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9826 IEMOP_HLP_DONE_DECODING();
9827 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9828 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9829 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
9830 IEM_MC_FETCH_EFLAGS(EFlags);
9831 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
9832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9834 else
9835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9836
9837 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9838 IEM_MC_COMMIT_EFLAGS(EFlags);
9839 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
9840 IEM_MC_ADVANCE_RIP();
9841 IEM_MC_END();
9842 return VINF_SUCCESS;
9843
9844 case IEMMODE_32BIT:
9845 IEM_MC_BEGIN(4, 3);
9846 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9847 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9848 IEM_MC_ARG(uint32_t, u32Src, 2);
9849 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9851 IEM_MC_LOCAL(uint32_t, u32Eax);
9852
9853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9854 IEMOP_HLP_DONE_DECODING();
9855 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9856 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9857 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
9858 IEM_MC_FETCH_EFLAGS(EFlags);
9859 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
9860 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9861 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9862 else
9863 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9864
9865 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9866 IEM_MC_COMMIT_EFLAGS(EFlags);
9867 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
9868 IEM_MC_ADVANCE_RIP();
9869 IEM_MC_END();
9870 return VINF_SUCCESS;
9871
9872 case IEMMODE_64BIT:
9873 IEM_MC_BEGIN(4, 3);
9874 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9875 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9876#ifdef RT_ARCH_X86
9877 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9878#else
9879 IEM_MC_ARG(uint64_t, u64Src, 2);
9880#endif
9881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9883 IEM_MC_LOCAL(uint64_t, u64Rax);
9884
9885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9886 IEMOP_HLP_DONE_DECODING();
9887 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9888 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
9889 IEM_MC_FETCH_EFLAGS(EFlags);
9890 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
9891#ifdef RT_ARCH_X86
9892 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9893 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9894 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9895 else
9896 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9897#else
9898 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9899 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9900 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9901 else
9902 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9903#endif
9904
9905 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9906 IEM_MC_COMMIT_EFLAGS(EFlags);
9907 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
9908 IEM_MC_ADVANCE_RIP();
9909 IEM_MC_END();
9910 return VINF_SUCCESS;
9911
9912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9913 }
9914 }
9915}
9916
9917
9918FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
9919{
9920 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
9921 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
9922
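/* Far pointers are stored offset first (m16:16, m16:32 or m16:64), with the
 * 16-bit selector immediately following the offset at +2, +4 or +8. */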
9923 switch (pVCpu->iem.s.enmEffOpSize)
9924 {
9925 case IEMMODE_16BIT:
9926 IEM_MC_BEGIN(5, 1);
9927 IEM_MC_ARG(uint16_t, uSel, 0);
9928 IEM_MC_ARG(uint16_t, offSeg, 1);
9929 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9930 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9931 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9932 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9935 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9936 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
9937 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9938 IEM_MC_END();
9939 return VINF_SUCCESS;
9940
9941 case IEMMODE_32BIT:
9942 IEM_MC_BEGIN(5, 1);
9943 IEM_MC_ARG(uint16_t, uSel, 0);
9944 IEM_MC_ARG(uint32_t, offSeg, 1);
9945 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9946 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9947 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9948 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9951 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9952 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
9953 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9954 IEM_MC_END();
9955 return VINF_SUCCESS;
9956
9957 case IEMMODE_64BIT:
9958 IEM_MC_BEGIN(5, 1);
9959 IEM_MC_ARG(uint16_t, uSel, 0);
9960 IEM_MC_ARG(uint64_t, offSeg, 1);
9961 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9962 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9963 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9964 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9967 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manual claims it only loads a 32-bit greg. */
9968 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9969 else
9970 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9971 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
9972 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9973 IEM_MC_END();
9974 return VINF_SUCCESS;
9975
9976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9977 }
9978}
9979
9980
9981/** Opcode 0x0f 0xb2. */
9982FNIEMOP_DEF(iemOp_lss_Gv_Mp)
9983{
9984 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
9985 IEMOP_HLP_MIN_386();
9986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9987 if (IEM_IS_MODRM_REG_MODE(bRm))
9988 return IEMOP_RAISE_INVALID_OPCODE();
9989 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
9990}
9991
9992
9993/** Opcode 0x0f 0xb3. */
9994FNIEMOP_DEF(iemOp_btr_Ev_Gv)
9995{
9996 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
9997 IEMOP_HLP_MIN_386();
9998 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
9999}
10000
10001
10002/** Opcode 0x0f 0xb4. */
10003FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10004{
10005 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10006 IEMOP_HLP_MIN_386();
10007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10008 if (IEM_IS_MODRM_REG_MODE(bRm))
10009 return IEMOP_RAISE_INVALID_OPCODE();
10010 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10011}
10012
10013
10014/** Opcode 0x0f 0xb5. */
10015FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10016{
10017 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10018 IEMOP_HLP_MIN_386();
10019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10020 if (IEM_IS_MODRM_REG_MODE(bRm))
10021 return IEMOP_RAISE_INVALID_OPCODE();
10022 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10023}
10024
10025
10026/** Opcode 0x0f 0xb6. */
10027FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10028{
10029 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10030 IEMOP_HLP_MIN_386();
10031
10032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10033
10034 /*
10035 * If rm is denoting a register, no more instruction bytes.
10036 */
10037 if (IEM_IS_MODRM_REG_MODE(bRm))
10038 {
10039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10040 switch (pVCpu->iem.s.enmEffOpSize)
10041 {
10042 case IEMMODE_16BIT:
10043 IEM_MC_BEGIN(0, 1);
10044 IEM_MC_LOCAL(uint16_t, u16Value);
10045 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10046 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10047 IEM_MC_ADVANCE_RIP();
10048 IEM_MC_END();
10049 return VINF_SUCCESS;
10050
10051 case IEMMODE_32BIT:
10052 IEM_MC_BEGIN(0, 1);
10053 IEM_MC_LOCAL(uint32_t, u32Value);
10054 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10055 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10056 IEM_MC_ADVANCE_RIP();
10057 IEM_MC_END();
10058 return VINF_SUCCESS;
10059
10060 case IEMMODE_64BIT:
10061 IEM_MC_BEGIN(0, 1);
10062 IEM_MC_LOCAL(uint64_t, u64Value);
10063 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10064 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10065 IEM_MC_ADVANCE_RIP();
10066 IEM_MC_END();
10067 return VINF_SUCCESS;
10068
10069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10070 }
10071 }
10072 else
10073 {
10074 /*
10075 * We're loading a register from memory.
10076 */
10077 switch (pVCpu->iem.s.enmEffOpSize)
10078 {
10079 case IEMMODE_16BIT:
10080 IEM_MC_BEGIN(0, 2);
10081 IEM_MC_LOCAL(uint16_t, u16Value);
10082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10085 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10086 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10087 IEM_MC_ADVANCE_RIP();
10088 IEM_MC_END();
10089 return VINF_SUCCESS;
10090
10091 case IEMMODE_32BIT:
10092 IEM_MC_BEGIN(0, 2);
10093 IEM_MC_LOCAL(uint32_t, u32Value);
10094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10097 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10098 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10099 IEM_MC_ADVANCE_RIP();
10100 IEM_MC_END();
10101 return VINF_SUCCESS;
10102
10103 case IEMMODE_64BIT:
10104 IEM_MC_BEGIN(0, 2);
10105 IEM_MC_LOCAL(uint64_t, u64Value);
10106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10109 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10110 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10111 IEM_MC_ADVANCE_RIP();
10112 IEM_MC_END();
10113 return VINF_SUCCESS;
10114
10115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10116 }
10117 }
10118}
10119
10120
10121/** Opcode 0x0f 0xb7. */
10122FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10123{
10124 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10125 IEMOP_HLP_MIN_386();
10126
10127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10128
10129 /** @todo Not entirely sure how the operand size prefix is handled here,
10130 * assuming that it will be ignored. Would be nice to have a few
10131 * tests for this. */
10132 /*
10133 * If rm is denoting a register, no more instruction bytes.
10134 */
10135 if (IEM_IS_MODRM_REG_MODE(bRm))
10136 {
10137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10138 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10139 {
10140 IEM_MC_BEGIN(0, 1);
10141 IEM_MC_LOCAL(uint32_t, u32Value);
10142 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10143 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10144 IEM_MC_ADVANCE_RIP();
10145 IEM_MC_END();
10146 }
10147 else
10148 {
10149 IEM_MC_BEGIN(0, 1);
10150 IEM_MC_LOCAL(uint64_t, u64Value);
10151 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10152 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10153 IEM_MC_ADVANCE_RIP();
10154 IEM_MC_END();
10155 }
10156 }
10157 else
10158 {
10159 /*
10160 * We're loading a register from memory.
10161 */
10162 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10163 {
10164 IEM_MC_BEGIN(0, 2);
10165 IEM_MC_LOCAL(uint32_t, u32Value);
10166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10169 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10170 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10171 IEM_MC_ADVANCE_RIP();
10172 IEM_MC_END();
10173 }
10174 else
10175 {
10176 IEM_MC_BEGIN(0, 2);
10177 IEM_MC_LOCAL(uint64_t, u64Value);
10178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10181 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10182 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10183 IEM_MC_ADVANCE_RIP();
10184 IEM_MC_END();
10185 }
10186 }
10187 return VINF_SUCCESS;
10188}
10189
10190
10191/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10192FNIEMOP_UD_STUB(iemOp_jmpe);
10193
10194
10195/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10196FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10197{
10198 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10199 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10200 return iemOp_InvalidNeedRM(pVCpu);
10201#ifndef TST_IEM_CHECK_MC
10202# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10203 static const IEMOPBINSIZES s_Native =
10204 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10205# endif
10206 static const IEMOPBINSIZES s_Fallback =
10207 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10208#endif
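/* Use the host's native POPCNT helpers when it has the feature, otherwise
 * the portable C fallbacks. */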
10209 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10210}
10211
10212
10213/**
10214 * @opcode 0xb9
10215 * @opinvalid intel-modrm
10216 * @optest ->
10217 */
10218FNIEMOP_DEF(iemOp_Grp10)
10219{
10220 /*
10221 * AMD does not decode beyond the 0xb9 whereas intel decodes the modr/m
10222 * byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10223 */
10224 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10225 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10226 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10227}
10228
10229
10230/** Opcode 0x0f 0xba. */
10231FNIEMOP_DEF(iemOp_Grp8)
10232{
10233 IEMOP_HLP_MIN_386();
10234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10235 PCIEMOPBINSIZES pImpl;
10236 switch (IEM_GET_MODRM_REG_8(bRm))
10237 {
10238 case 0: case 1: case 2: case 3:
10239 /* Both AMD and Intel want full modr/m decoding and imm8. */
10240 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10241 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10242 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10243 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10244 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10246 }
10247 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10248
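/* Note: in these Ib forms the bit offset is masked down to the operand width
 * (u8Bit & 15/31/63 below), so unlike the Ev,Gv forms the access can never
 * land outside the addressed operand. */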
10249 if (IEM_IS_MODRM_REG_MODE(bRm))
10250 {
10251 /* register destination. */
10252 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10254
10255 switch (pVCpu->iem.s.enmEffOpSize)
10256 {
10257 case IEMMODE_16BIT:
10258 IEM_MC_BEGIN(3, 0);
10259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10260 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10261 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10262
10263 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10264 IEM_MC_REF_EFLAGS(pEFlags);
10265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10266
10267 IEM_MC_ADVANCE_RIP();
10268 IEM_MC_END();
10269 return VINF_SUCCESS;
10270
10271 case IEMMODE_32BIT:
10272 IEM_MC_BEGIN(3, 0);
10273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10274 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10275 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10276
10277 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10278 IEM_MC_REF_EFLAGS(pEFlags);
10279 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10280
10281 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10282 IEM_MC_ADVANCE_RIP();
10283 IEM_MC_END();
10284 return VINF_SUCCESS;
10285
10286 case IEMMODE_64BIT:
10287 IEM_MC_BEGIN(3, 0);
10288 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10289 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10290 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10291
10292 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10293 IEM_MC_REF_EFLAGS(pEFlags);
10294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10295
10296 IEM_MC_ADVANCE_RIP();
10297 IEM_MC_END();
10298 return VINF_SUCCESS;
10299
10300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10301 }
10302 }
10303 else
10304 {
10305 /* memory destination. */
10306
10307 uint32_t fAccess;
10308 if (pImpl->pfnLockedU16)
10309 fAccess = IEM_ACCESS_DATA_RW;
10310 else /* BT */
10311 fAccess = IEM_ACCESS_DATA_R;
10312
10313 /** @todo test negative bit offsets! */
10314 switch (pVCpu->iem.s.enmEffOpSize)
10315 {
10316 case IEMMODE_16BIT:
10317 IEM_MC_BEGIN(3, 1);
10318 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10319 IEM_MC_ARG(uint16_t, u16Src, 1);
10320 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10322
10323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10324 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10325 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10326 if (pImpl->pfnLockedU16)
10327 IEMOP_HLP_DONE_DECODING();
10328 else
10329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10330 IEM_MC_FETCH_EFLAGS(EFlags);
10331 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10334 else
10335 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10336 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10337
10338 IEM_MC_COMMIT_EFLAGS(EFlags);
10339 IEM_MC_ADVANCE_RIP();
10340 IEM_MC_END();
10341 return VINF_SUCCESS;
10342
10343 case IEMMODE_32BIT:
10344 IEM_MC_BEGIN(3, 1);
10345 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10346 IEM_MC_ARG(uint32_t, u32Src, 1);
10347 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10349
10350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10351 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10352 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10353 if (pImpl->pfnLockedU16)
10354 IEMOP_HLP_DONE_DECODING();
10355 else
10356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10357 IEM_MC_FETCH_EFLAGS(EFlags);
10358 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10359 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10361 else
10362 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10363 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10364
10365 IEM_MC_COMMIT_EFLAGS(EFlags);
10366 IEM_MC_ADVANCE_RIP();
10367 IEM_MC_END();
10368 return VINF_SUCCESS;
10369
10370 case IEMMODE_64BIT:
10371 IEM_MC_BEGIN(3, 1);
10372 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10373 IEM_MC_ARG(uint64_t, u64Src, 1);
10374 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10376
10377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10378 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10379 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10380 if (pImpl->pfnLockedU16)
10381 IEMOP_HLP_DONE_DECODING();
10382 else
10383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10384 IEM_MC_FETCH_EFLAGS(EFlags);
10385 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10386 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10387 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10388 else
10389 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10390 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10391
10392 IEM_MC_COMMIT_EFLAGS(EFlags);
10393 IEM_MC_ADVANCE_RIP();
10394 IEM_MC_END();
10395 return VINF_SUCCESS;
10396
10397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10398 }
10399 }
10400}
10401
10402
10403/** Opcode 0x0f 0xbb. */
10404FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10405{
10406 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10407 IEMOP_HLP_MIN_386();
10408 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10409}
10410
10411
10412/**
10413 * Common worker for BSF and BSR instructions.
10414 *
10415 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10416 * the destination register, which means that for 32-bit operations the high
10417 * bits must be left alone.
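 *
 * As implemented here, a zero source sets ZF and leaves the destination
 * untouched, so for 32-bit operands the upper half of the 64-bit register is
 * only cleared when a bit was actually found (ZF clear); see the
 * IEM_MC_IF_EFL_BIT_NOT_SET guards below.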
10418 *
10419 * @param pImpl Pointer to the instruction implementation (assembly).
10420 */
10421FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10422{
10423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10424
10425 /*
10426 * If rm is denoting a register, no more instruction bytes.
10427 */
10428 if (IEM_IS_MODRM_REG_MODE(bRm))
10429 {
10430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10431 switch (pVCpu->iem.s.enmEffOpSize)
10432 {
10433 case IEMMODE_16BIT:
10434 IEM_MC_BEGIN(3, 0);
10435 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10436 IEM_MC_ARG(uint16_t, u16Src, 1);
10437 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10438
10439 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10440 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10441 IEM_MC_REF_EFLAGS(pEFlags);
10442 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10443
10444 IEM_MC_ADVANCE_RIP();
10445 IEM_MC_END();
10446 break;
10447
10448 case IEMMODE_32BIT:
10449 IEM_MC_BEGIN(3, 0);
10450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10451 IEM_MC_ARG(uint32_t, u32Src, 1);
10452 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10453
10454 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10455 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10456 IEM_MC_REF_EFLAGS(pEFlags);
10457 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10458 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10459 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10460 IEM_MC_ENDIF();
10461 IEM_MC_ADVANCE_RIP();
10462 IEM_MC_END();
10463 break;
10464
10465 case IEMMODE_64BIT:
10466 IEM_MC_BEGIN(3, 0);
10467 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10468 IEM_MC_ARG(uint64_t, u64Src, 1);
10469 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10470
10471 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10472 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10473 IEM_MC_REF_EFLAGS(pEFlags);
10474 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10475
10476 IEM_MC_ADVANCE_RIP();
10477 IEM_MC_END();
10478 break;
10479 }
10480 }
10481 else
10482 {
10483 /*
10484 * We're accessing memory.
10485 */
10486 switch (pVCpu->iem.s.enmEffOpSize)
10487 {
10488 case IEMMODE_16BIT:
10489 IEM_MC_BEGIN(3, 1);
10490 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10491 IEM_MC_ARG(uint16_t, u16Src, 1);
10492 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10494
10495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10497 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10498 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10499 IEM_MC_REF_EFLAGS(pEFlags);
10500 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10501
10502 IEM_MC_ADVANCE_RIP();
10503 IEM_MC_END();
10504 break;
10505
10506 case IEMMODE_32BIT:
10507 IEM_MC_BEGIN(3, 1);
10508 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10509 IEM_MC_ARG(uint32_t, u32Src, 1);
10510 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10512
10513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10515 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10517 IEM_MC_REF_EFLAGS(pEFlags);
10518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10519
10520 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10521 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10522 IEM_MC_ENDIF();
10523 IEM_MC_ADVANCE_RIP();
10524 IEM_MC_END();
10525 break;
10526
10527 case IEMMODE_64BIT:
10528 IEM_MC_BEGIN(3, 1);
10529 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10530 IEM_MC_ARG(uint64_t, u64Src, 1);
10531 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10533
10534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10536 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10537 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10538 IEM_MC_REF_EFLAGS(pEFlags);
10539 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10540
10541 IEM_MC_ADVANCE_RIP();
10542 IEM_MC_END();
10543 break;
10544 }
10545 }
10546 return VINF_SUCCESS;
10547}
10548
10549
10550/** Opcode 0x0f 0xbc. */
10551FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10552{
10553 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10554 IEMOP_HLP_MIN_386();
10555 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10556 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10557}
10558
10559
10560/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10561FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10562{
10563 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10564 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10565 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10566
10567#ifndef TST_IEM_CHECK_MC
10568 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10569 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10570 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10571 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10572 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10573 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10574 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10575 {
10576 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10577 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10578 };
10579#endif
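/* The first table row is used when the host lacks BMI1 (fallbacks only), the
 * second when the native helpers may be used; the column picks the guest
 * EFLAGS flavour.  (A summary of IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX.) */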
10580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10582 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10583}
10584
10585
10586/** Opcode 0x0f 0xbd. */
10587FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10588{
10589 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10590 IEMOP_HLP_MIN_386();
10591 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10592 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
10593}
10594
10595
10596/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
10597FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
10598{
10599 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10600 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
10601 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10602
10603#ifndef TST_IEM_CHECK_MC
10604 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
10605 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
10606 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
10607 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
10608 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
10609 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
10610 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
10611 {
10612 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
10613 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
10614 };
10615#endif
10616 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10617 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10618 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10619}
10620
10621
10622
10623/** Opcode 0x0f 0xbe. */
10624FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
10625{
10626 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
10627 IEMOP_HLP_MIN_386();
10628
10629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10630
10631 /*
10632 * If rm is denoting a register, no more instruction bytes.
10633 */
10634 if (IEM_IS_MODRM_REG_MODE(bRm))
10635 {
10636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10637 switch (pVCpu->iem.s.enmEffOpSize)
10638 {
10639 case IEMMODE_16BIT:
10640 IEM_MC_BEGIN(0, 1);
10641 IEM_MC_LOCAL(uint16_t, u16Value);
10642 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10643 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10644 IEM_MC_ADVANCE_RIP();
10645 IEM_MC_END();
10646 return VINF_SUCCESS;
10647
10648 case IEMMODE_32BIT:
10649 IEM_MC_BEGIN(0, 1);
10650 IEM_MC_LOCAL(uint32_t, u32Value);
10651 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10652 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10653 IEM_MC_ADVANCE_RIP();
10654 IEM_MC_END();
10655 return VINF_SUCCESS;
10656
10657 case IEMMODE_64BIT:
10658 IEM_MC_BEGIN(0, 1);
10659 IEM_MC_LOCAL(uint64_t, u64Value);
10660 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10661 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10662 IEM_MC_ADVANCE_RIP();
10663 IEM_MC_END();
10664 return VINF_SUCCESS;
10665
10666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10667 }
10668 }
10669 else
10670 {
10671 /*
10672 * We're loading a register from memory.
10673 */
10674 switch (pVCpu->iem.s.enmEffOpSize)
10675 {
10676 case IEMMODE_16BIT:
10677 IEM_MC_BEGIN(0, 2);
10678 IEM_MC_LOCAL(uint16_t, u16Value);
10679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10682 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10683 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10684 IEM_MC_ADVANCE_RIP();
10685 IEM_MC_END();
10686 return VINF_SUCCESS;
10687
10688 case IEMMODE_32BIT:
10689 IEM_MC_BEGIN(0, 2);
10690 IEM_MC_LOCAL(uint32_t, u32Value);
10691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10694 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10695 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10696 IEM_MC_ADVANCE_RIP();
10697 IEM_MC_END();
10698 return VINF_SUCCESS;
10699
10700 case IEMMODE_64BIT:
10701 IEM_MC_BEGIN(0, 2);
10702 IEM_MC_LOCAL(uint64_t, u64Value);
10703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10706 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10707 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10708 IEM_MC_ADVANCE_RIP();
10709 IEM_MC_END();
10710 return VINF_SUCCESS;
10711
10712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10713 }
10714 }
10715}
10716
10717
10718/** Opcode 0x0f 0xbf. */
10719FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
10720{
10721 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
10722 IEMOP_HLP_MIN_386();
10723
10724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10725
10726 /** @todo Not entirely sure how the operand size prefix is handled here,
10727 * assuming that it will be ignored. Would be nice to have a few
10728 * tests for this. */
10729 /*
10730 * If rm is denoting a register, no more instruction bytes.
10731 */
10732 if (IEM_IS_MODRM_REG_MODE(bRm))
10733 {
10734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10735 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10736 {
10737 IEM_MC_BEGIN(0, 1);
10738 IEM_MC_LOCAL(uint32_t, u32Value);
10739 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10740 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10741 IEM_MC_ADVANCE_RIP();
10742 IEM_MC_END();
10743 }
10744 else
10745 {
10746 IEM_MC_BEGIN(0, 1);
10747 IEM_MC_LOCAL(uint64_t, u64Value);
10748 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10749 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10750 IEM_MC_ADVANCE_RIP();
10751 IEM_MC_END();
10752 }
10753 }
10754 else
10755 {
10756 /*
10757 * We're loading a register from memory.
10758 */
10759 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10760 {
10761 IEM_MC_BEGIN(0, 2);
10762 IEM_MC_LOCAL(uint32_t, u32Value);
10763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10766 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10767 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10768 IEM_MC_ADVANCE_RIP();
10769 IEM_MC_END();
10770 }
10771 else
10772 {
10773 IEM_MC_BEGIN(0, 2);
10774 IEM_MC_LOCAL(uint64_t, u64Value);
10775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10778 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10779 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10780 IEM_MC_ADVANCE_RIP();
10781 IEM_MC_END();
10782 }
10783 }
10784 return VINF_SUCCESS;
10785}
10786
10787
10788/** Opcode 0x0f 0xc0. */
10789FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
10790{
10791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10792 IEMOP_HLP_MIN_486();
10793 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
10794
10795 /*
10796 * If rm is denoting a register, no more instruction bytes.
10797 */
10798 if (IEM_IS_MODRM_REG_MODE(bRm))
10799 {
10800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10801
10802 IEM_MC_BEGIN(3, 0);
10803 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10804 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10806
10807 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10808 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10809 IEM_MC_REF_EFLAGS(pEFlags);
10810 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10811
10812 IEM_MC_ADVANCE_RIP();
10813 IEM_MC_END();
10814 }
10815 else
10816 {
10817 /*
10818 * We're accessing memory.
10819 */
10820 IEM_MC_BEGIN(3, 3);
10821 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10822 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10823 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10824 IEM_MC_LOCAL(uint8_t, u8RegCopy);
10825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10826
10827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10828 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10829 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10830 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
10831 IEM_MC_FETCH_EFLAGS(EFlags);
10832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10833 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10834 else
10835 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
10836
10837 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10838 IEM_MC_COMMIT_EFLAGS(EFlags);
10839 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
10840 IEM_MC_ADVANCE_RIP();
10841 IEM_MC_END();
10842 return VINF_SUCCESS;
10843 }
10844 return VINF_SUCCESS;
10845}
10846
10847
10848/** Opcode 0x0f 0xc1. */
10849FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
10850{
10851 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
10852 IEMOP_HLP_MIN_486();
10853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10854
10855 /*
10856 * If rm is denoting a register, no more instruction bytes.
10857 */
10858 if (IEM_IS_MODRM_REG_MODE(bRm))
10859 {
10860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10861
10862 switch (pVCpu->iem.s.enmEffOpSize)
10863 {
10864 case IEMMODE_16BIT:
10865 IEM_MC_BEGIN(3, 0);
10866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10867 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10868 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10869
10870 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10871 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10872 IEM_MC_REF_EFLAGS(pEFlags);
10873 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10874
10875 IEM_MC_ADVANCE_RIP();
10876 IEM_MC_END();
10877 return VINF_SUCCESS;
10878
10879 case IEMMODE_32BIT:
10880 IEM_MC_BEGIN(3, 0);
10881 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10882 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10883 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10884
10885 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10886 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10887 IEM_MC_REF_EFLAGS(pEFlags);
10888 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10889
10890 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10891 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10892 IEM_MC_ADVANCE_RIP();
10893 IEM_MC_END();
10894 return VINF_SUCCESS;
10895
10896 case IEMMODE_64BIT:
10897 IEM_MC_BEGIN(3, 0);
10898 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10899 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10900 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10901
10902 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10903 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10904 IEM_MC_REF_EFLAGS(pEFlags);
10905 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10906
10907 IEM_MC_ADVANCE_RIP();
10908 IEM_MC_END();
10909 return VINF_SUCCESS;
10910
10911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10912 }
10913 }
10914 else
10915 {
10916 /*
10917 * We're accessing memory.
10918 */
10919 switch (pVCpu->iem.s.enmEffOpSize)
10920 {
10921 case IEMMODE_16BIT:
10922 IEM_MC_BEGIN(3, 3);
10923 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10924 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10925 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10926 IEM_MC_LOCAL(uint16_t, u16RegCopy);
10927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10928
10929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10930 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10931 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10932 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
10933 IEM_MC_FETCH_EFLAGS(EFlags);
10934 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10935 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10936 else
10937 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
10938
10939 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10940 IEM_MC_COMMIT_EFLAGS(EFlags);
10941 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
10942 IEM_MC_ADVANCE_RIP();
10943 IEM_MC_END();
10944 return VINF_SUCCESS;
10945
10946 case IEMMODE_32BIT:
10947 IEM_MC_BEGIN(3, 3);
10948 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10949 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10950 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10951 IEM_MC_LOCAL(uint32_t, u32RegCopy);
10952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10953
10954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10955 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10956 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10957 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
10958 IEM_MC_FETCH_EFLAGS(EFlags);
10959 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10960 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10961 else
10962 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
10963
10964 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10965 IEM_MC_COMMIT_EFLAGS(EFlags);
10966 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
10967 IEM_MC_ADVANCE_RIP();
10968 IEM_MC_END();
10969 return VINF_SUCCESS;
10970
10971 case IEMMODE_64BIT:
10972 IEM_MC_BEGIN(3, 3);
10973 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10974 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10975 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10976 IEM_MC_LOCAL(uint64_t, u64RegCopy);
10977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10978
10979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10980 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10981 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10982 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
10983 IEM_MC_FETCH_EFLAGS(EFlags);
10984 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10985 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10986 else
10987 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
10988
10989 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10990 IEM_MC_COMMIT_EFLAGS(EFlags);
10991 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
10992 IEM_MC_ADVANCE_RIP();
10993 IEM_MC_END();
10994 return VINF_SUCCESS;
10995
10996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10997 }
10998 }
10999}
11000
11001
11002/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11003FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11004{
11005 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11006
11007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11008 if (IEM_IS_MODRM_REG_MODE(bRm))
11009 {
11010 /*
11011 * Register, register.
11012 */
11013 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11015 IEM_MC_BEGIN(4, 2);
11016 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11017 IEM_MC_LOCAL(X86XMMREG, Dst);
11018 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11019 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11020 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11021 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11022 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11023 IEM_MC_PREPARE_SSE_USAGE();
11024 IEM_MC_REF_MXCSR(pfMxcsr);
11025 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11026 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11027 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11028 IEM_MC_IF_MXCSR_XCPT_PENDING()
11029 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11030 IEM_MC_ELSE()
11031 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11032 IEM_MC_ENDIF();
11033
11034 IEM_MC_ADVANCE_RIP();
11035 IEM_MC_END();
11036 }
11037 else
11038 {
11039 /*
11040 * Register, memory.
11041 */
11042 IEM_MC_BEGIN(4, 3);
11043 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11044 IEM_MC_LOCAL(X86XMMREG, Dst);
11045 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11046 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11047 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11049
11050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11051 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11052 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11054 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11055 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11056
11057 IEM_MC_PREPARE_SSE_USAGE();
11058 IEM_MC_REF_MXCSR(pfMxcsr);
11059 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11060 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11061 IEM_MC_IF_MXCSR_XCPT_PENDING()
11062 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11063 IEM_MC_ELSE()
11064 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11065 IEM_MC_ENDIF();
11066
11067 IEM_MC_ADVANCE_RIP();
11068 IEM_MC_END();
11069 }
11070 return VINF_SUCCESS;
11071}
11072
11073
11074/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11075FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11076{
11077 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11078
11079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11080 if (IEM_IS_MODRM_REG_MODE(bRm))
11081 {
11082 /*
11083 * Register, register.
11084 */
11085 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11087 IEM_MC_BEGIN(4, 2);
11088 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11089 IEM_MC_LOCAL(X86XMMREG, Dst);
11090 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11091 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11092 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11093 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11095 IEM_MC_PREPARE_SSE_USAGE();
11096 IEM_MC_REF_MXCSR(pfMxcsr);
11097 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11098 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11099 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11100 IEM_MC_IF_MXCSR_XCPT_PENDING()
11101 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11102 IEM_MC_ELSE()
11103 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11104 IEM_MC_ENDIF();
11105
11106 IEM_MC_ADVANCE_RIP();
11107 IEM_MC_END();
11108 }
11109 else
11110 {
11111 /*
11112 * Register, memory.
11113 */
11114 IEM_MC_BEGIN(4, 3);
11115 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11116 IEM_MC_LOCAL(X86XMMREG, Dst);
11117 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11118 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11119 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11121
11122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11123 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11124 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11126 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11127 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11128
11129 IEM_MC_PREPARE_SSE_USAGE();
11130 IEM_MC_REF_MXCSR(pfMxcsr);
11131 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11132 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11133 IEM_MC_IF_MXCSR_XCPT_PENDING()
11134 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11135 IEM_MC_ELSE()
11136 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11137 IEM_MC_ENDIF();
11138
11139 IEM_MC_ADVANCE_RIP();
11140 IEM_MC_END();
11141 }
11142 return VINF_SUCCESS;
11143}
11144
11145
11146/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11147FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11148{
11149 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11150
11151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11152 if (IEM_IS_MODRM_REG_MODE(bRm))
11153 {
11154 /*
11155 * Register, register.
11156 */
11157 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11159 IEM_MC_BEGIN(4, 2);
11160 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11161 IEM_MC_LOCAL(X86XMMREG, Dst);
11162 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11163 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11164 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11165 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11166 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11167 IEM_MC_PREPARE_SSE_USAGE();
11168 IEM_MC_REF_MXCSR(pfMxcsr);
11169 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11170 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11171 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11172 IEM_MC_IF_MXCSR_XCPT_PENDING()
11173 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11174 IEM_MC_ELSE()
11175 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11176 IEM_MC_ENDIF();
11177
11178 IEM_MC_ADVANCE_RIP();
11179 IEM_MC_END();
11180 }
11181 else
11182 {
11183 /*
11184 * Register, memory.
11185 */
11186 IEM_MC_BEGIN(4, 3);
11187 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11188 IEM_MC_LOCAL(X86XMMREG, Dst);
11189 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11190 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11191 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11193
11194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11195 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11196 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11198 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11199 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11200
11201 IEM_MC_PREPARE_SSE_USAGE();
11202 IEM_MC_REF_MXCSR(pfMxcsr);
11203 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11204 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11205 IEM_MC_IF_MXCSR_XCPT_PENDING()
11206 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11207 IEM_MC_ELSE()
11208 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11209 IEM_MC_ENDIF();
11210
11211 IEM_MC_ADVANCE_RIP();
11212 IEM_MC_END();
11213 }
11214 return VINF_SUCCESS;
11215}
11216
11217
11218/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11219FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11220{
11221 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11222
11223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11224 if (IEM_IS_MODRM_REG_MODE(bRm))
11225 {
11226 /*
11227 * Register, register.
11228 */
11229 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11231 IEM_MC_BEGIN(4, 2);
11232 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11233 IEM_MC_LOCAL(X86XMMREG, Dst);
11234 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11235 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11236 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11237 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11238 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11239 IEM_MC_PREPARE_SSE_USAGE();
11240 IEM_MC_REF_MXCSR(pfMxcsr);
11241 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11242 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11244 IEM_MC_IF_MXCSR_XCPT_PENDING()
11245 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11246 IEM_MC_ELSE()
11247 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11248 IEM_MC_ENDIF();
11249
11250 IEM_MC_ADVANCE_RIP();
11251 IEM_MC_END();
11252 }
11253 else
11254 {
11255 /*
11256 * Register, memory.
11257 */
11258 IEM_MC_BEGIN(4, 3);
11259 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11260 IEM_MC_LOCAL(X86XMMREG, Dst);
11261 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11262 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11263 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11265
11266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11267 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11268 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11270 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11271        IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11272
11273 IEM_MC_PREPARE_SSE_USAGE();
11274 IEM_MC_REF_MXCSR(pfMxcsr);
11275 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11277 IEM_MC_IF_MXCSR_XCPT_PENDING()
11278 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11279 IEM_MC_ELSE()
11280            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11281 IEM_MC_ENDIF();
11282
11283 IEM_MC_ADVANCE_RIP();
11284 IEM_MC_END();
11285 }
11286 return VINF_SUCCESS;
11287}
11288
11289
11290/** Opcode 0x0f 0xc3. */
11291FNIEMOP_DEF(iemOp_movnti_My_Gy)
11292{
11293 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11294
11295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11296
11297 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11298 if (IEM_IS_MODRM_MEM_MODE(bRm))
11299 {
11300 switch (pVCpu->iem.s.enmEffOpSize)
11301 {
11302 case IEMMODE_32BIT:
11303 IEM_MC_BEGIN(0, 2);
11304 IEM_MC_LOCAL(uint32_t, u32Value);
11305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11306
11307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11309 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11310 return IEMOP_RAISE_INVALID_OPCODE();
11311
11312 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11313 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11314 IEM_MC_ADVANCE_RIP();
11315 IEM_MC_END();
11316 break;
11317
11318 case IEMMODE_64BIT:
11319 IEM_MC_BEGIN(0, 2);
11320 IEM_MC_LOCAL(uint64_t, u64Value);
11321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11322
11323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11325 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11326 return IEMOP_RAISE_INVALID_OPCODE();
11327
11328 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11329 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11330 IEM_MC_ADVANCE_RIP();
11331 IEM_MC_END();
11332 break;
11333
11334 case IEMMODE_16BIT:
11335 /** @todo check this form. */
11336 return IEMOP_RAISE_INVALID_OPCODE();
11337 }
11338 }
11339 else
11340 return IEMOP_RAISE_INVALID_OPCODE();
11341 return VINF_SUCCESS;
11342}
11343
11344
11345/* Opcode 0x66 0x0f 0xc3 - invalid */
11346/* Opcode 0xf3 0x0f 0xc3 - invalid */
11347/* Opcode 0xf2 0x0f 0xc3 - invalid */
11348
11349
11350/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11351FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11352{
11353 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11355 if (IEM_IS_MODRM_REG_MODE(bRm))
11356 {
11357 /*
11358 * Register, register.
11359 */
11360 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11362 IEM_MC_BEGIN(3, 0);
11363 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11364 IEM_MC_ARG(uint16_t, u16Src, 1);
11365 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11366 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11367 IEM_MC_PREPARE_FPU_USAGE();
11368 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11369 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11370 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11371 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11372 IEM_MC_FPU_TO_MMX_MODE();
11373 IEM_MC_ADVANCE_RIP();
11374 IEM_MC_END();
11375 }
11376 else
11377 {
11378 /*
11379 * Register, memory.
11380 */
11381 IEM_MC_BEGIN(3, 2);
11382 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11383 IEM_MC_ARG(uint16_t, u16Src, 1);
11384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11385
11386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11387 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11388 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11390 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11391 IEM_MC_PREPARE_FPU_USAGE();
11392
11393 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11394 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11395 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11396 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11397 IEM_MC_FPU_TO_MMX_MODE();
11398 IEM_MC_ADVANCE_RIP();
11399 IEM_MC_END();
11400 }
11401 return VINF_SUCCESS;
11402}
11403
11404
11405/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11406FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11407{
11408 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11410 if (IEM_IS_MODRM_REG_MODE(bRm))
11411 {
11412 /*
11413 * Register, register.
11414 */
11415 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11417 IEM_MC_BEGIN(3, 0);
11418 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11419 IEM_MC_ARG(uint16_t, u16Src, 1);
11420 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11421 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11422 IEM_MC_PREPARE_SSE_USAGE();
11423 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11424 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11425 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11426 IEM_MC_ADVANCE_RIP();
11427 IEM_MC_END();
11428 }
11429 else
11430 {
11431 /*
11432 * Register, memory.
11433 */
11434 IEM_MC_BEGIN(3, 2);
11435 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11436 IEM_MC_ARG(uint16_t, u16Src, 1);
11437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11438
11439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11440 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11441 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11443 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11444 IEM_MC_PREPARE_SSE_USAGE();
11445
11446 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11447 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11448 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11449 IEM_MC_ADVANCE_RIP();
11450 IEM_MC_END();
11451 }
11452 return VINF_SUCCESS;
11453}
11454
11455
11456/* Opcode 0xf3 0x0f 0xc4 - invalid */
11457/* Opcode 0xf2 0x0f 0xc4 - invalid */
11458
11459
11460/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11461FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11462{
11463    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11465 if (IEM_IS_MODRM_REG_MODE(bRm))
11466 {
11467 /*
11468 * Register, register.
11469 */
11470 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11472 IEM_MC_BEGIN(3, 1);
11473 IEM_MC_LOCAL(uint16_t, u16Dst);
11474 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11475 IEM_MC_ARG(uint64_t, u64Src, 1);
11476 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11477 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11478 IEM_MC_PREPARE_FPU_USAGE();
11479 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11480 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11481 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11482 IEM_MC_FPU_TO_MMX_MODE();
11483 IEM_MC_ADVANCE_RIP();
11484 IEM_MC_END();
11485 return VINF_SUCCESS;
11486 }
11487
11488 /* No memory operand. */
11489 return IEMOP_RAISE_INVALID_OPCODE();
11490}
11491
11492
11493/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11494FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11495{
11496 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11498 if (IEM_IS_MODRM_REG_MODE(bRm))
11499 {
11500 /*
11501 * Register, register.
11502 */
11503 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11505 IEM_MC_BEGIN(3, 1);
11506 IEM_MC_LOCAL(uint16_t, u16Dst);
11507 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11508 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11509 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11510 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11511 IEM_MC_PREPARE_SSE_USAGE();
11512 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11513 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11515 IEM_MC_ADVANCE_RIP();
11516 IEM_MC_END();
11517 return VINF_SUCCESS;
11518 }
11519
11520 /* No memory operand. */
11521 return IEMOP_RAISE_INVALID_OPCODE();
11522}
11523
11524
11525/* Opcode 0xf3 0x0f 0xc5 - invalid */
11526/* Opcode 0xf2 0x0f 0xc5 - invalid */
11527
11528
11529/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11530FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11531{
11532 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11534 if (IEM_IS_MODRM_REG_MODE(bRm))
11535 {
11536 /*
11537 * Register, register.
11538 */
11539 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_BEGIN(3, 0);
11542 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11543 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11544 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11546 IEM_MC_PREPARE_SSE_USAGE();
11547 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11548 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11549 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11550 IEM_MC_ADVANCE_RIP();
11551 IEM_MC_END();
11552 }
11553 else
11554 {
11555 /*
11556 * Register, memory.
11557 */
11558 IEM_MC_BEGIN(3, 2);
11559 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11560 IEM_MC_LOCAL(RTUINT128U, uSrc);
11561 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11563
11564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11565 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11566 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11568 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11569 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11570
11571 IEM_MC_PREPARE_SSE_USAGE();
11572 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11573 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11574
11575 IEM_MC_ADVANCE_RIP();
11576 IEM_MC_END();
11577 }
11578 return VINF_SUCCESS;
11579}
11580
11581
11582/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11583FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11584{
11585 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11587 if (IEM_IS_MODRM_REG_MODE(bRm))
11588 {
11589 /*
11590 * Register, register.
11591 */
11592 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11594 IEM_MC_BEGIN(3, 0);
11595 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11596 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11597 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11598 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11599 IEM_MC_PREPARE_SSE_USAGE();
11600 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11601 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11603 IEM_MC_ADVANCE_RIP();
11604 IEM_MC_END();
11605 }
11606 else
11607 {
11608 /*
11609 * Register, memory.
11610 */
11611 IEM_MC_BEGIN(3, 2);
11612 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11613 IEM_MC_LOCAL(RTUINT128U, uSrc);
11614 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11616
11617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11618 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11619 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11621 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11622 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11623
11624 IEM_MC_PREPARE_SSE_USAGE();
11625 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11626 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11627
11628 IEM_MC_ADVANCE_RIP();
11629 IEM_MC_END();
11630 }
11631 return VINF_SUCCESS;
11632}
11633
11634
11635/* Opcode 0xf3 0x0f 0xc6 - invalid */
11636/* Opcode 0xf2 0x0f 0xc6 - invalid */
11637
11638
11639/** Opcode 0x0f 0xc7 !11/1. */
11640FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
11641{
11642 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
11643
11644 IEM_MC_BEGIN(4, 3);
11645 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
11646 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
11647 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
11648 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11649 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
11650 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
11651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11652
11653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11654 IEMOP_HLP_DONE_DECODING();
11655 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11656
11657 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
11658 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
11659 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
11660
11661 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
11662 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
11663 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
11664
11665 IEM_MC_FETCH_EFLAGS(EFlags);
11666 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11667 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11668 else
11669 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11670
11671 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
11672 IEM_MC_COMMIT_EFLAGS(EFlags);
11673 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11674 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
11675 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
11676 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
11677 IEM_MC_ENDIF();
11678 IEM_MC_ADVANCE_RIP();
11679
11680 IEM_MC_END();
11681 return VINF_SUCCESS;
11682}
11683
11684
11685/** Opcode REX.W 0x0f 0xc7 !11/1. */
11686FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
11687{
11688 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
11689 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11690 {
11691#if 0
11692 RT_NOREF(bRm);
11693 IEMOP_BITCH_ABOUT_STUB();
11694 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
11695#else
11696 IEM_MC_BEGIN(4, 3);
11697 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
11698 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
11699 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
11700 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11701 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
11702 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
11703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11704
11705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11706 IEMOP_HLP_DONE_DECODING();
11707 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
11708 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11709
11710 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
11711 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
11712 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
11713
11714 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
11715 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
11716 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
11717
11718 IEM_MC_FETCH_EFLAGS(EFlags);
11719# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
11720# if defined(RT_ARCH_AMD64)
11721 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11722# endif
11723 {
11724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11726 else
11727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11728 }
11729# if defined(RT_ARCH_AMD64)
11730 else
11731# endif
11732# endif
11733# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
11734 {
11735 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
11736 accesses and not all all atomic, which works fine on in UNI CPU guest
11737 configuration (ignoring DMA). If guest SMP is active we have no choice
11738 but to use a rendezvous callback here. Sigh. */
11739 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
11740 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11741 else
11742 {
11743 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11744 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
11745 }
11746 }
11747# endif
11748
11749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
11750 IEM_MC_COMMIT_EFLAGS(EFlags);
11751 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11752 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
11753 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
11754 IEM_MC_ENDIF();
11755 IEM_MC_ADVANCE_RIP();
11756
11757 IEM_MC_END();
11758 return VINF_SUCCESS;
11759#endif
11760 }
11761 Log(("cmpxchg16b -> #UD\n"));
11762 return IEMOP_RAISE_INVALID_OPCODE();
11763}
11764
11765FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
11766{
11767 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
11768 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
11769 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
11770}
11771
11772/** Opcode 0x0f 0xc7 11/6. */
11773FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
11774
11775/** Opcode 0x0f 0xc7 !11/6. */
11776#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11777FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
11778{
11779 IEMOP_MNEMONIC(vmptrld, "vmptrld");
11780 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
11781 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
11782 IEM_MC_BEGIN(2, 0);
11783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11784 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11786 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11787 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11788 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
11789 IEM_MC_END();
11790 return VINF_SUCCESS;
11791}
11792#else
11793FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
11794#endif
11795
11796/** Opcode 0x66 0x0f 0xc7 !11/6. */
11797#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11798FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
11799{
11800 IEMOP_MNEMONIC(vmclear, "vmclear");
11801 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
11802 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
11803 IEM_MC_BEGIN(2, 0);
11804 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11805 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11807 IEMOP_HLP_DONE_DECODING();
11808 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11809 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
11810 IEM_MC_END();
11811 return VINF_SUCCESS;
11812}
11813#else
11814FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
11815#endif
11816
11817/** Opcode 0xf3 0x0f 0xc7 !11/6. */
11818#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11819FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
11820{
11821 IEMOP_MNEMONIC(vmxon, "vmxon");
11822 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
11823 IEM_MC_BEGIN(2, 0);
11824 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11825 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11827 IEMOP_HLP_DONE_DECODING();
11828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11829 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
11830 IEM_MC_END();
11831 return VINF_SUCCESS;
11832}
11833#else
11834FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
11835#endif
11836
11837/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
11838#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11839FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
11840{
11841 IEMOP_MNEMONIC(vmptrst, "vmptrst");
11842 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
11843 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
11844 IEM_MC_BEGIN(2, 0);
11845 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11846 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11848 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11849 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11850 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
11851 IEM_MC_END();
11852 return VINF_SUCCESS;
11853}
11854#else
11855FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
11856#endif
11857
11858/** Opcode 0x0f 0xc7 11/7. */
11859FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
11860
11861
11862/**
11863 * Group 9 jump table for register variant.
11864 */
11865IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
11866{ /* pfx: none, 066h, 0f3h, 0f2h */
11867 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11868 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
11869 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11870 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11871 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11872 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11873 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11874 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11875};
11876AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
11877
11878
11879/**
11880 * Group 9 jump table for memory variant.
11881 */
11882IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
11883{ /* pfx: none, 066h, 0f3h, 0f2h */
11884 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11885 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
11886 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11887 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11888 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11889 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11890 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
11891 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11892};
11893AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
11894
11895
11896/** Opcode 0x0f 0xc7. */
11897FNIEMOP_DEF(iemOp_Grp9)
11898{
11899 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
11900 if (IEM_IS_MODRM_REG_MODE(bRm))
11901 /* register, register */
11902 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11903 + pVCpu->iem.s.idxPrefix], bRm);
11904 /* memory, register */
11905 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11906 + pVCpu->iem.s.idxPrefix], bRm);
11907}
11908
11909
11910/**
11911 * Common 'bswap register' helper.
11912 */
11913FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
11914{
11915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11916 switch (pVCpu->iem.s.enmEffOpSize)
11917 {
11918 case IEMMODE_16BIT:
11919 IEM_MC_BEGIN(1, 0);
11920 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11921 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
11922 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
11923 IEM_MC_ADVANCE_RIP();
11924 IEM_MC_END();
11925 return VINF_SUCCESS;
11926
11927 case IEMMODE_32BIT:
11928 IEM_MC_BEGIN(1, 0);
11929 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11930 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
11931 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11932 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
11933 IEM_MC_ADVANCE_RIP();
11934 IEM_MC_END();
11935 return VINF_SUCCESS;
11936
11937 case IEMMODE_64BIT:
11938 IEM_MC_BEGIN(1, 0);
11939 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11940 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
11941 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
11942 IEM_MC_ADVANCE_RIP();
11943 IEM_MC_END();
11944 return VINF_SUCCESS;
11945
11946 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11947 }
11948}
11949
11950
11951/** Opcode 0x0f 0xc8. */
11952FNIEMOP_DEF(iemOp_bswap_rAX_r8)
11953{
11954 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
11955    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
11956             prefix, but REX.B appears to be the correct prefix.  For a parallel
11957 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
11958 IEMOP_HLP_MIN_486();
11959 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
11960}
11961
11962
11963/** Opcode 0x0f 0xc9. */
11964FNIEMOP_DEF(iemOp_bswap_rCX_r9)
11965{
11966 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
11967 IEMOP_HLP_MIN_486();
11968 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
11969}
11970
11971
11972/** Opcode 0x0f 0xca. */
11973FNIEMOP_DEF(iemOp_bswap_rDX_r10)
11974{
11975    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
11976 IEMOP_HLP_MIN_486();
11977 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
11978}
11979
11980
11981/** Opcode 0x0f 0xcb. */
11982FNIEMOP_DEF(iemOp_bswap_rBX_r11)
11983{
11984    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
11985 IEMOP_HLP_MIN_486();
11986 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
11987}
11988
11989
11990/** Opcode 0x0f 0xcc. */
11991FNIEMOP_DEF(iemOp_bswap_rSP_r12)
11992{
11993 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
11994 IEMOP_HLP_MIN_486();
11995 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
11996}
11997
11998
11999/** Opcode 0x0f 0xcd. */
12000FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12001{
12002 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12003 IEMOP_HLP_MIN_486();
12004 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12005}
12006
12007
12008/** Opcode 0x0f 0xce. */
12009FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12010{
12011 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12012 IEMOP_HLP_MIN_486();
12013 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12014}
12015
12016
12017/** Opcode 0x0f 0xcf. */
12018FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12019{
12020 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12021 IEMOP_HLP_MIN_486();
12022 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12023}
12024
12025
12026/* Opcode 0x0f 0xd0 - invalid */
12027
12028
12029/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12030FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12031{
12032 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12033 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12034}
12035
12036
12037/* Opcode 0xf3 0x0f 0xd0 - invalid */
12038
12039
12040/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12041FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12042{
12043 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12044 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12045}
12046
12047
12048
12049/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12050FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12051{
12052    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12053 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12054}
12055
12056/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12057FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12058{
12059 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12060 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12061}
12062
12063/* Opcode 0xf3 0x0f 0xd1 - invalid */
12064/* Opcode 0xf2 0x0f 0xd1 - invalid */
12065
12066/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12067FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12068{
12069 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12070 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12071}
12072
12073
12074/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12075FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12076{
12077 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12078 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12079}
12080
12081
12082/* Opcode 0xf3 0x0f 0xd2 - invalid */
12083/* Opcode 0xf2 0x0f 0xd2 - invalid */
12084
12085/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12086FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12087{
12088 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12089 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12090}
12091
12092
12093/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12094FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12095{
12096 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12097 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12098}
12099
12100
12101/* Opcode 0xf3 0x0f 0xd3 - invalid */
12102/* Opcode 0xf2 0x0f 0xd3 - invalid */
12103
12104
12105/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12106FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12107{
12108 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12109 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12110}
12111
12112
12113/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12114FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12115{
12116 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12117 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12118}
12119
12120
12121/* Opcode 0xf3 0x0f 0xd4 - invalid */
12122/* Opcode 0xf2 0x0f 0xd4 - invalid */
12123
12124/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12125FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12126{
12127 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12128 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12129}
12130
12131/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12132FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12133{
12134 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12135 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12136}
12137
12138
12139/* Opcode 0xf3 0x0f 0xd5 - invalid */
12140/* Opcode 0xf2 0x0f 0xd5 - invalid */
12141
12142/* Opcode 0x0f 0xd6 - invalid */
12143
12144/**
12145 * @opcode 0xd6
12146 * @oppfx 0x66
12147 * @opcpuid sse2
12148 * @opgroup og_sse2_pcksclr_datamove
12149 * @opxcpttype none
12150 * @optest op1=-1 op2=2 -> op1=2
12151 * @optest op1=0 op2=-42 -> op1=-42
12152 */
12153FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12154{
12155 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12157 if (IEM_IS_MODRM_REG_MODE(bRm))
12158 {
12159 /*
12160 * Register, register.
12161 */
12162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12163 IEM_MC_BEGIN(0, 2);
12164 IEM_MC_LOCAL(uint64_t, uSrc);
12165
12166 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12168
12169 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12170 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12171
12172 IEM_MC_ADVANCE_RIP();
12173 IEM_MC_END();
12174 }
12175 else
12176 {
12177 /*
12178 * Memory, register.
12179 */
12180 IEM_MC_BEGIN(0, 2);
12181 IEM_MC_LOCAL(uint64_t, uSrc);
12182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12183
12184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12186 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12188
12189 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12190 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12191
12192 IEM_MC_ADVANCE_RIP();
12193 IEM_MC_END();
12194 }
12195 return VINF_SUCCESS;
12196}
12197
12198
12199/**
12200 * @opcode 0xd6
12201 * @opcodesub 11 mr/reg
12202 * @oppfx f3
12203 * @opcpuid sse2
12204 * @opgroup og_sse2_simdint_datamove
12205 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12206 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12207 */
12208FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12209{
12210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12211 if (IEM_IS_MODRM_REG_MODE(bRm))
12212 {
12213 /*
12214 * Register, register.
12215 */
12216 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12218 IEM_MC_BEGIN(0, 1);
12219 IEM_MC_LOCAL(uint64_t, uSrc);
12220
12221 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12222 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12223 IEM_MC_FPU_TO_MMX_MODE();
12224
12225 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12226 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12227
12228 IEM_MC_ADVANCE_RIP();
12229 IEM_MC_END();
12230 return VINF_SUCCESS;
12231 }
12232
12233 /**
12234 * @opdone
12235 * @opmnemonic udf30fd6mem
12236 * @opcode 0xd6
12237 * @opcodesub !11 mr/reg
12238 * @oppfx f3
12239 * @opunused intel-modrm
12240 * @opcpuid sse
12241 * @optest ->
12242 */
12243 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12244}
12245
12246
12247/**
12248 * @opcode 0xd6
12249 * @opcodesub 11 mr/reg
12250 * @oppfx f2
12251 * @opcpuid sse2
12252 * @opgroup og_sse2_simdint_datamove
12253 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12254 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12255 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12256 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12257 * @optest op1=-42 op2=0xfedcba9876543210
12258 * -> op1=0xfedcba9876543210 ftw=0xff
12259 */
12260FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12261{
12262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12263 if (IEM_IS_MODRM_REG_MODE(bRm))
12264 {
12265 /*
12266 * Register, register.
12267 */
12268 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12270 IEM_MC_BEGIN(0, 1);
12271 IEM_MC_LOCAL(uint64_t, uSrc);
12272
12273 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12274 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12275 IEM_MC_FPU_TO_MMX_MODE();
12276
12277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12278 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12279
12280 IEM_MC_ADVANCE_RIP();
12281 IEM_MC_END();
12282 return VINF_SUCCESS;
12283 }
12284
12285 /**
12286 * @opdone
12287 * @opmnemonic udf20fd6mem
12288 * @opcode 0xd6
12289 * @opcodesub !11 mr/reg
12290 * @oppfx f2
12291 * @opunused intel-modrm
12292 * @opcpuid sse
12293 * @optest ->
12294 */
12295 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12296}
12297
12298
12299/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12300FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12301{
12302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12303    /* Docs say register only. */
12304 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12305 {
12306        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12307 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12309 IEM_MC_BEGIN(2, 0);
12310 IEM_MC_ARG(uint64_t *, puDst, 0);
12311 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12312 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12313 IEM_MC_PREPARE_FPU_USAGE();
12314 IEM_MC_FPU_TO_MMX_MODE();
12315
12316 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12317 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12318 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12319
12320 IEM_MC_ADVANCE_RIP();
12321 IEM_MC_END();
12322 return VINF_SUCCESS;
12323 }
12324 return IEMOP_RAISE_INVALID_OPCODE();
12325}
12326
12327
12328 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12329FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12330{
12331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12332    /* Docs say register only. */
12333 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12334 {
12335        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12336 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12338 IEM_MC_BEGIN(2, 0);
12339 IEM_MC_ARG(uint64_t *, puDst, 0);
12340 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12341 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12342 IEM_MC_PREPARE_SSE_USAGE();
12343 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12344 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12345 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12346 IEM_MC_ADVANCE_RIP();
12347 IEM_MC_END();
12348 return VINF_SUCCESS;
12349 }
12350 return IEMOP_RAISE_INVALID_OPCODE();
12351}
12352
12353
12354/* Opcode 0xf3 0x0f 0xd7 - invalid */
12355/* Opcode 0xf2 0x0f 0xd7 - invalid */
12356
12357
12358/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12359FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12360{
12361 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12362 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12363}
12364
12365
12366/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12367FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12368{
12369 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12370 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12371}
12372
12373
12374/* Opcode 0xf3 0x0f 0xd8 - invalid */
12375/* Opcode 0xf2 0x0f 0xd8 - invalid */
12376
12377/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12378FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12379{
12380 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12381 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12382}
12383
12384
12385/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12386FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12387{
12388 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12389 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12390}
12391
12392
12393/* Opcode 0xf3 0x0f 0xd9 - invalid */
12394/* Opcode 0xf2 0x0f 0xd9 - invalid */
12395
12396/** Opcode 0x0f 0xda - pminub Pq, Qq */
12397FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12398{
12399 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12400 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12401}
12402
12403
12404/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12405FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12406{
12407 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12408 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12409}
12410
12411/* Opcode 0xf3 0x0f 0xda - invalid */
12412/* Opcode 0xf2 0x0f 0xda - invalid */
12413
12414/** Opcode 0x0f 0xdb - pand Pq, Qq */
12415FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12416{
12417 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12418 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12419}
12420
12421
12422/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12423FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12424{
12425 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12426 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12427}
12428
12429
12430/* Opcode 0xf3 0x0f 0xdb - invalid */
12431/* Opcode 0xf2 0x0f 0xdb - invalid */
12432
12433/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12434FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12435{
12436 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12437 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12438}
12439
12440
12441/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12442FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12443{
12444 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12445 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12446}
12447
12448
12449/* Opcode 0xf3 0x0f 0xdc - invalid */
12450/* Opcode 0xf2 0x0f 0xdc - invalid */
12451
12452/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12453FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12454{
12455 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12456 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12457}
12458
12459
12460/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12461FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12462{
12463 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12464 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12465}
12466
12467
12468/* Opcode 0xf3 0x0f 0xdd - invalid */
12469/* Opcode 0xf2 0x0f 0xdd - invalid */
12470
12471/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12472FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12473{
12474 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12475 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12476}
12477
12478
12479/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12480FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12481{
12482 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12483 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12484}
12485
12486/* Opcode 0xf3 0x0f 0xde - invalid */
12487/* Opcode 0xf2 0x0f 0xde - invalid */
12488
12489
12490/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12491FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12492{
12493 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12494 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12495}
12496
12497
12498/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12499FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12500{
12501 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12502 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12503}
12504
12505
12506/* Opcode 0xf3 0x0f 0xdf - invalid */
12507/* Opcode 0xf2 0x0f 0xdf - invalid */
12508
12509/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12510FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12511{
12512 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12513 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12514}
12515
12516
12517/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12518FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12519{
12520 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12521 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12522}
12523
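/*
 * Illustrative sketch (not built): PAVGB/PAVGW compute a rounding unsigned
 * average, i.e. (src1 + src2 + 1) >> 1 per element, per the SDM.  The lane
 * helper below is made up for illustration only.
 */
#if 0
static uint8_t pavgbLaneRef(uint8_t uSrc1, uint8_t uSrc2)
{
    return (uint8_t)(((unsigned)uSrc1 + uSrc2 + 1) >> 1); /* 9-bit temporary, cannot overflow */
}
#endif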
12524
12525/* Opcode 0xf3 0x0f 0xe0 - invalid */
12526/* Opcode 0xf2 0x0f 0xe0 - invalid */
12527
12528/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12529FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12530{
12531 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12532 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12533}
12534
12535
12536/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12537FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12538{
12539 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12540 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12541}
12542
12543
12544/* Opcode 0xf3 0x0f 0xe1 - invalid */
12545/* Opcode 0xf2 0x0f 0xe1 - invalid */
12546
12547/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12548FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12549{
12550 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12551 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12552}
12553
12554
12555/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12556FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12557{
12558 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12559 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12560}
12561
12562
12563/* Opcode 0xf3 0x0f 0xe2 - invalid */
12564/* Opcode 0xf2 0x0f 0xe2 - invalid */
12565
12566/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12567FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12568{
12569 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12570 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12571}
12572
12573
12574/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12575FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12576{
12577 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12578 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12579}
12580
12581
12582/* Opcode 0xf3 0x0f 0xe3 - invalid */
12583/* Opcode 0xf2 0x0f 0xe3 - invalid */
12584
12585/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12586FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12587{
12588 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12589 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
12590}
12591
12592
12593/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
12594FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
12595{
12596 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12597 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
12598}
12599
12600
12601/* Opcode 0xf3 0x0f 0xe4 - invalid */
12602/* Opcode 0xf2 0x0f 0xe4 - invalid */
12603
12604/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
12605FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
12606{
12607 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12608 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
12609}
12610
12611
12612/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
12613FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
12614{
12615 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12616 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
12617}
12618
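/*
 * Illustrative sketch (not built): PMULHUW/PMULHW keep only the high 16 bits
 * of the 32-bit unsigned resp. signed word product, per the SDM.  The lane
 * helpers below are made up for illustration only.
 */
#if 0
static uint16_t pmulhuwLaneRef(uint16_t uSrc1, uint16_t uSrc2)
{
    return (uint16_t)(((uint32_t)uSrc1 * uSrc2) >> 16);
}

static uint16_t pmulhwLaneRef(int16_t iSrc1, int16_t iSrc2)
{
    return (uint16_t)(((int32_t)iSrc1 * iSrc2) >> 16);
}
#endif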
12619
12620/* Opcode 0xf3 0x0f 0xe5 - invalid */
12621/* Opcode 0xf2 0x0f 0xe5 - invalid */
12622/* Opcode 0x0f 0xe6 - invalid */
12623
12624
12625/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
12626FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
12627{
12628 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12629 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
12630}
12631
12632
12633/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
12634FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
12635{
12636 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12637 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
12638}
12639
12640
12641/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
12642FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
12643{
12644 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12645 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
12646}
12647
12648
12649/**
12650 * @opcode 0xe7
12651 * @opcodesub !11 mr/reg
12652 * @oppfx none
12653 * @opcpuid sse
12654 * @opgroup og_sse1_cachect
12655 * @opxcpttype none
12656 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
12657 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12658 */
12659FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
12660{
12661 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12663 if (IEM_IS_MODRM_MEM_MODE(bRm))
12664 {
12665 /* Register, memory. */
12666 IEM_MC_BEGIN(0, 2);
12667 IEM_MC_LOCAL(uint64_t, uSrc);
12668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12669
12670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12672 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12673 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12674 IEM_MC_FPU_TO_MMX_MODE();
12675
12676 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
12677 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12678
12679 IEM_MC_ADVANCE_RIP();
12680 IEM_MC_END();
12681 return VINF_SUCCESS;
12682 }
12683 /**
12684 * @opdone
12685 * @opmnemonic ud0fe7reg
12686 * @opcode 0xe7
12687 * @opcodesub 11 mr/reg
12688 * @oppfx none
12689 * @opunused immediate
12690 * @opcpuid sse
12691 * @optest ->
12692 */
12693 return IEMOP_RAISE_INVALID_OPCODE();
12694}
12695
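/* Note: The non-temporal hint of MOVNTQ is a pure cache-management hint, so
   emulating it as a plain 64-bit store like above should be architecturally
   sound. */
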
12696/**
12697 * @opcode 0xe7
12698 * @opcodesub !11 mr/reg
12699 * @oppfx 0x66
12700 * @opcpuid sse2
12701 * @opgroup og_sse2_cachect
12702 * @opxcpttype 1
12703 * @optest op1=-1 op2=2 -> op1=2
12704 * @optest op1=0 op2=-42 -> op1=-42
12705 */
12706FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
12707{
12708 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12710 if (IEM_IS_MODRM_MEM_MODE(bRm))
12711 {
12712 /* Register, memory. */
12713 IEM_MC_BEGIN(0, 2);
12714 IEM_MC_LOCAL(RTUINT128U, uSrc);
12715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12716
12717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12719 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12720 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12721
12722 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12723 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12724
12725 IEM_MC_ADVANCE_RIP();
12726 IEM_MC_END();
12727 return VINF_SUCCESS;
12728 }
12729
12730 /**
12731 * @opdone
12732 * @opmnemonic ud660fe7reg
12733 * @opcode 0xe7
12734 * @opcodesub 11 mr/reg
12735 * @oppfx 0x66
12736 * @opunused immediate
12737 * @opcpuid sse
12738 * @optest ->
12739 */
12740 return IEMOP_RAISE_INVALID_OPCODE();
12741}
12742
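/* Note: IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces the 16-byte alignment that
   MOVNTDQ requires (unaligned operands raise \#GP); the non-temporal hint
   itself is not modelled, same as for MOVNTQ above. */
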
12743/* Opcode 0xf3 0x0f 0xe7 - invalid */
12744/* Opcode 0xf2 0x0f 0xe7 - invalid */
12745
12746
12747/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
12748FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
12749{
12750 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12751 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
12752}
12753
12754
12755/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
12756FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
12757{
12758 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12759 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
12760}
12761
12762
12763/* Opcode 0xf3 0x0f 0xe8 - invalid */
12764/* Opcode 0xf2 0x0f 0xe8 - invalid */
12765
12766/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
12767FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
12768{
12769 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12770 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
12771}
12772
12773
12774/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
12775FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
12776{
12777 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12778 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
12779}
12780
12781
12782/* Opcode 0xf3 0x0f 0xe9 - invalid */
12783/* Opcode 0xf2 0x0f 0xe9 - invalid */
12784
12785
12786/** Opcode 0x0f 0xea - pminsw Pq, Qq */
12787FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
12788{
12789 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12790 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
12791}
12792
12793
12794/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
12795FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
12796{
12797 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12798 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
12799}
12800
12801
12802/* Opcode 0xf3 0x0f 0xea - invalid */
12803/* Opcode 0xf2 0x0f 0xea - invalid */
12804
12805
12806/** Opcode 0x0f 0xeb - por Pq, Qq */
12807FNIEMOP_DEF(iemOp_por_Pq_Qq)
12808{
12809 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12810 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
12811}
12812
12813
12814/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
12815FNIEMOP_DEF(iemOp_por_Vx_Wx)
12816{
12817 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12818 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
12819}
12820
12821
12822/* Opcode 0xf3 0x0f 0xeb - invalid */
12823/* Opcode 0xf2 0x0f 0xeb - invalid */
12824
12825/** Opcode 0x0f 0xec - paddsb Pq, Qq */
12826FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
12827{
12828 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12829 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
12830}
12831
12832
12833/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
12834FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
12835{
12836 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12837 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
12838}
12839
12840
12841/* Opcode 0xf3 0x0f 0xec - invalid */
12842/* Opcode 0xf2 0x0f 0xec - invalid */
12843
12844/** Opcode 0x0f 0xed - paddsw Pq, Qq */
12845FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
12846{
12847 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12848 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
12849}
12850
12851
12852/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
12853FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
12854{
12855 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12856 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
12857}
12858
12859
12860/* Opcode 0xf3 0x0f 0xed - invalid */
12861/* Opcode 0xf2 0x0f 0xed - invalid */
12862
12863
12864/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
12865FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
12866{
12867 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12868 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
12869}
12870
12871
12872/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
12873FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
12874{
12875 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12876 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
12877}
12878
12879
12880/* Opcode 0xf3 0x0f 0xee - invalid */
12881/* Opcode 0xf2 0x0f 0xee - invalid */
12882
12883
12884/** Opcode 0x0f 0xef - pxor Pq, Qq */
12885FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
12886{
12887 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12888 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
12889}
12890
12891
12892/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
12893FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
12894{
12895 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12896 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
12897}
12898
12899
12900/* Opcode 0xf3 0x0f 0xef - invalid */
12901/* Opcode 0xf2 0x0f 0xef - invalid */
12902
12903/* Opcode 0x0f 0xf0 - invalid */
12904/* Opcode 0x66 0x0f 0xf0 - invalid */
12905
12906
12907/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
12908FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
12909{
12910 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12912 if (IEM_IS_MODRM_REG_MODE(bRm))
12913 {
12914 /*
12915 * Register, register - (not implemented, assuming it raises \#UD).
12916 */
12917 return IEMOP_RAISE_INVALID_OPCODE();
12918 }
12919 else
12920 {
12921 /*
12922 * Register, memory.
12923 */
12924 IEM_MC_BEGIN(0, 2);
12925 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
12926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12927
12928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12930 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
12931 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12932 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12933 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
12934
12935 IEM_MC_ADVANCE_RIP();
12936 IEM_MC_END();
12937 }
12938 return VINF_SUCCESS;
12939}
12940
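/* Note: Unlike MOVDQA, LDDQU has no alignment requirement, which is why the
   plain IEM_MC_FETCH_MEM_U128 is used above rather than the aligned
   variant. */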
12941
12942/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
12943FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
12944{
12945 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12946 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
12947}
12948
12949
12950/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
12951FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
12952{
12953 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12954 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
12955}
12956
12957
12958/* Opcode 0xf2 0x0f 0xf1 - invalid */
12959
12960/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
12961FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
12962{
12963 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12964 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
12965}
12966
12967
12968/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
12969FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
12970{
12971 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12972 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
12973}
12974
12975
12976/* Opcode 0xf2 0x0f 0xf2 - invalid */
12977
12978/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
12979FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
12980{
12981 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12982 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
12983}
12984
12985
12986/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
12987FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
12988{
12989 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12990 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
12991}
12992
12993/* Opcode 0xf2 0x0f 0xf3 - invalid */
12994
12995/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
12996FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
12997{
12998 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12999 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13000}
13001
13002
13003/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13004FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13005{
13006 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13007 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13008}
13009
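/*
 * Illustrative sketch (not built): PMULUDQ multiplies the low unsigned
 * dwords of the qword lanes into full 64-bit products, per the SDM.  The
 * lane helper below is made up for illustration only.
 */
#if 0
static uint64_t pmuludqLaneRef(uint32_t uSrc1, uint32_t uSrc2)
{
    return (uint64_t)uSrc1 * uSrc2; /* no truncation, full 64-bit result */
}
#endif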
13010
13011/* Opcode 0xf2 0x0f 0xf4 - invalid */
13012
13013/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13014FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13015{
13016 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13017 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13018}
13019
13020
13021/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13022FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13023{
13024 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13025 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13026}
13027
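/*
 * Illustrative sketch (not built): PMADDWD multiplies corresponding signed
 * words and sums adjacent products into sign-extended dwords, per the SDM.
 * The pair helper below is made up for illustration only.
 */
#if 0
static int32_t pmaddwdPairRef(int16_t iDst0, int16_t iSrc0, int16_t iDst1, int16_t iSrc1)
{
    return (int32_t)iDst0 * iSrc0 + (int32_t)iDst1 * iSrc1;
}
#endif
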
13028/* Opcode 0xf2 0x0f 0xf5 - invalid */
13029
13030/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13031FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13032{
13033 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13034 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13035}
13036
13037
13038/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13039FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13040{
13041 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13042 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13043}
13044
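/*
 * Illustrative sketch (not built): PSADBW sums the absolute differences of
 * the byte lanes, depositing the sum in the low word of (each half of) the
 * destination with the remaining bits zeroed, per the SDM.  The helper below
 * is made up for illustration and covers the 64-bit (MMX) form.
 */
#if 0
static uint64_t psadbwU64Ref(uint64_t uSrc1, uint64_t uSrc2)
{
    unsigned uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        int iDiff = (int)((uSrc1 >> iByte * 8) & 0xff) - (int)((uSrc2 >> iByte * 8) & 0xff);
        uSum += (unsigned)(iDiff >= 0 ? iDiff : -iDiff);
    }
    return uSum; /* bits 16 thru 63 are zero */
}
#endif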
13045
13046/* Opcode 0xf2 0x0f 0xf6 - invalid */
13047
13048/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13049FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13050/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13051FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13052/* Opcode 0xf2 0x0f 0xf7 - invalid */
13053
13054
13055/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13056FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13057{
13058 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13059 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13060}
13061
13062
13063/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13064FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13065{
13066 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13067 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13068}
13069
13070
13071/* Opcode 0xf2 0x0f 0xf8 - invalid */
13072
13073
13074/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13075FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13076{
13077 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13078 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13079}
13080
13081
13082/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13083FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13084{
13085 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13086 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13087}
13088
13089
13090/* Opcode 0xf2 0x0f 0xf9 - invalid */
13091
13092
13093/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13094FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13095{
13096 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13097 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13098}
13099
13100
13101/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13102FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13103{
13104 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13105 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13106}
13107
13108
13109/* Opcode 0xf2 0x0f 0xfa - invalid */
13110
13111
13112/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13113FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13114{
13115 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13116 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13117}
13118
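/* Note: The 64-bit (MMX) form of PSUBQ was added with SSE2, hence the _Ex
   worker above which additionally checks the guest's fSse2 feature flag
   before dispatching. */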
13119
13120/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13121FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13122{
13123 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13124 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13125}
13126
13127
13128/* Opcode 0xf2 0x0f 0xfb - invalid */
13129
13130
13131/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13132FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13133{
13134 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13135 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13136}
13137
13138
13139/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13140FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13141{
13142 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13143 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13144}
13145
13146
13147/* Opcode 0xf2 0x0f 0xfc - invalid */
13148
13149
13150/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13151FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13152{
13153 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13154 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13155}
13156
13157
13158/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13159FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13160{
13161 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13162 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13163}
13164
13165
13166/* Opcode 0xf2 0x0f 0xfd - invalid */
13167
13168
13169/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13170FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13171{
13172 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13173 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13174}
13175
13176
13177/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13178FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13179{
13180 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13181 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13182}
13183
13184
13185/* Opcode 0xf2 0x0f 0xfe - invalid */
13186
13187
13188/** Opcode **** 0x0f 0xff - UD0 */
13189FNIEMOP_DEF(iemOp_ud0)
13190{
13191 IEMOP_MNEMONIC(ud0, "ud0");
13192 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13193 {
13194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13195#ifndef TST_IEM_CHECK_MC
13196 if (IEM_IS_MODRM_MEM_MODE(bRm))
13197 {
13198 RTGCPTR GCPtrEff;
13199 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13200 if (rcStrict != VINF_SUCCESS)
13201 return rcStrict;
13202 }
13203#endif
13204 IEMOP_HLP_DONE_DECODING();
13205 }
13206 return IEMOP_RAISE_INVALID_OPCODE();
13207}
13208
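/* Note: On Intel CPUs UD0 consumes a ModR/M byte, so the decoder above reads
   it (and, outside the MC testcase build, decodes the effective address -
   presumably so exception priority comes out right) before raising \#UD.
   Other vendors get the bare invalid-opcode treatment. */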
13209
13210
13211/**
13212 * Two byte opcode map, first byte 0x0f.
13213 *
13214 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13215 * check if it needs updating as well when making changes.
13216 */
13217IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13218{
13219 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13220 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13221 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13222 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13223 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13224 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13225 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13226 /* 0x06 */ IEMOP_X4(iemOp_clts),
13227 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13228 /* 0x08 */ IEMOP_X4(iemOp_invd),
13229 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13230 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13231 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13232 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13233 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13234 /* 0x0e */ IEMOP_X4(iemOp_femms),
13235 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13236
13237 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13238 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13239 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13240 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13241 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13242 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13243 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13244 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13245 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13246 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13247 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13248 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13249 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13250 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13251 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13252 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13253
13254 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13255 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13256 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13257 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13258 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13259 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13260 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13261 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13262 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13263 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13264 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13265 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13266 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13267 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13268 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13269 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13270
13271 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13272 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13273 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13274 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13275 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13276 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13277 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13278 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13279 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13280 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13281 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13282 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13283 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13284 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13285 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13286 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13287
13288 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13289 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13290 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13291 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13292 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13293 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13294 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13295 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13296 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13297 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13298 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13299 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13300 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13301 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13302 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13303 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13304
13305 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13306 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13307 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13308 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13309 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13310 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13311 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13312 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13313 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13314 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13315 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13316 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13317 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13318 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13319 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13320 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13321
13322 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13323 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13324 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13325 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13326 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13327 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13328 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13329 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13330 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13331 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13332 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13333 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13334 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13335 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13336 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13337 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13338
13339 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13340 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13341 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13342 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13343 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13344 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13345 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13346 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13347
13348 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13349 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13350 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13351 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13352 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13353 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13354 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13355 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13356
13357 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13358 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13359 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13360 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13361 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13362 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13363 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13364 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13365 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13366 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13367 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13368 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13369 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13370 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13371 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13372 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13373
13374 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13375 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13376 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13377 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13378 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13379 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13380 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13381 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13382 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13383 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13384 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13385 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13386 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13387 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13388 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13389 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13390
13391 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13392 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13393 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13394 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13395 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13396 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13397 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13398 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13399 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13400 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13401 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13402 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13403 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13404 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13405 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13406 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13407
13408 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13409 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13410 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13411 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13412 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13413 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13414 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13415 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13416 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13417 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13418 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13419 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13420 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13421 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13422 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13423 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13424
13425 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13426 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13427 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13428 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13429 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13430 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13431 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13432 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13433 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13434 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13435 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13436 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13437 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13438 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13439 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13440 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13441
13442 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13443 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13444 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13445 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13446 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13447 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13448 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13449 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13450 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13451 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13452 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13453 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13454 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13455 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13456 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13457 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13458
13459 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13460 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13461 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13462 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13463 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13464 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13465 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13466 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13467 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13468 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13469 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13470 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13471 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13472 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13473 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13474 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13475
13476 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13477 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13478 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13479 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13480 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13481 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13482 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13483 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13484 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13485 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13486 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13487 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13488 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13489 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13490 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13491 /* 0xff */ IEMOP_X4(iemOp_ud0),
13492};
13493AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
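/* 256 opcode bytes times 4 mandatory-prefix columns (none, 0x66, 0xf3, 0xf2) = 1024 entries. */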
13494
13495/** @} */
13496