VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@96811

Last change on this file since 96811 was 96807, checked in by vboxsync, 21 months ago:

IEM: Fixed MMX/SSE shift immediate instruction decoding.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96807 2022-09-20 20:26:41Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *     pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
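

/*
 * Illustrative sketch (an assumption, not part of the original file): opcode
 * handlers typically just forward their arithmetic helper to this worker,
 * e.g. for PADDB (0x0f 0xfc) something along these lines:
 *
 *     FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 *     }
 */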


/**
 * Common worker for MMX instructions on the form:
 *     pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
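

/*
 * Illustrative sketch (an assumption): helpers that need no FXSAVE state,
 * such as the register-form shifts, would pair with the Opt worker above:
 *
 *     FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
 *     }
 */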


/**
 * Common worker for MMX instructions on the form:
 *     pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
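

/*
 * Illustrative sketch (an assumption; the actual helper-to-worker pairing is
 * defined by the handlers later in this file): SSE-extended MMX ops such as
 * PMULHUW (0x0f 0xe4) would funnel through the worker above so the
 * SSE-or-AMD-MMXEXT cpuid check is applied:
 *
 *     FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmulhuw_u64);
 *     }
 */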


/**
 * Common worker for MMX instructions on the form:
 *     pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *     pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
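

/*
 * Illustrative sketch (an assumption): the fSupported flag lets one handler
 * serve CPUs with and without the extension, e.g. for the MMX form of
 * PMULUDQ (0x0f 0xf4), which requires SSE2:
 *
 *     FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64,
 *                               IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
 *     }
 */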


/**
 * Common worker for SSE2 instructions on the forms:
 *     pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
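

/*
 * Illustrative sketch (an assumption): the 0x66-prefixed XMM forms reuse the
 * same shape with a u128 helper, e.g. for PADDB (0x66 0x0f 0xfc):
 *
 *     FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
 *     }
 */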


/**
 * Common worker for SSE2 instructions on the forms:
 *     pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *     pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
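

/*
 * Illustrative sketch (an assumption): the unpack-low family matches this
 * worker, reading only 32 bits in the memory form, e.g. PUNPCKLBW (0x0f 0x60):
 *
 *     FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *     }
 */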


/**
 * Common worker for SSE instructions on the forms:
 *     pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit wide access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *     pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit wide access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *     pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
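

/*
 * Illustrative sketch (an assumption): the unpack-high family matches this
 * worker, doing a full 64-bit read in the memory form, e.g. PUNPCKHBW (0x0f 0x68):
 *
 *     FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
 *     }
 */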


/**
 * Common worker for SSE instructions on the form:
 *     pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *     pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
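

/*
 * Illustrative sketch (an assumption): packed single-precision arithmetic
 * flows through this worker so MXCSR and SIMD FP exceptions are handled,
 * e.g. for ADDPS (0x0f 0x58):
 *
 *     FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *     }
 */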


/**
 * Common worker for SSE instructions on the forms:
 *     pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
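

/*
 * Illustrative sketch (an assumption, including the helper name): scalar
 * single-precision ops use the R32 worker so the memory form only reads
 * 32 bits, e.g. for ADDSS (0xf3 0x0f 0x58):
 *
 *     FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *     }
 */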


/**
 * Common worker for SSE2 instructions on the forms:
 *     pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *     pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
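

/*
 * Illustrative sketch (an assumption, including the helper name): scalar
 * double-precision ops pair with the R64 worker, e.g. for ADDSD (0xf2 0x0f 0x58):
 *
 *     FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
 *     }
 */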


/**
 * Common worker for SSE2 instructions on the form:
 *     pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions on the forms:
 *     hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
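

/*
 * Illustrative sketch (an assumption): the horizontal add/sub family would
 * use this worker to get the SSE3 cpuid check, e.g. for HADDPS (0xf2 0x0f 0x7c):
 *
 *     FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *     }
 */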


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
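

/*
 * Worked decode example (illustrative): for the byte sequence 0x0f 0x00 0xc8,
 * bRm = 0xc8 decodes as mod=3, reg=1, rm=0, so g_apfnGroup6[1] dispatches to
 * iemOp_Grp6_str and the instruction is the register form "str ax".
 */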
1322
1323
1324/** Opcode 0x0f 0x01 /0. */
1325FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
1326{
1327 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
1328 IEMOP_HLP_MIN_286();
1329 IEMOP_HLP_64BIT_OP_SIZE();
1330 IEM_MC_BEGIN(2, 1);
1331 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1332 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1335 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1336 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
1337 IEM_MC_END();
1338 return VINF_SUCCESS;
1339}
1340
1341
1342/** Opcode 0x0f 0x01 /0. */
1343FNIEMOP_DEF(iemOp_Grp7_vmcall)
1344{
1345 IEMOP_MNEMONIC(vmcall, "vmcall");
1346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
1347
1348 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1349 want all hypercalls regardless of instruction used, and if a
1350 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1351 (NEM/win makes ASSUMPTIONS about this behavior.) */
1352 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
1353}
1354
1355
1356/** Opcode 0x0f 0x01 /0. */
1357#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1358FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1359{
1360 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
1361 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
1362 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
1363 IEMOP_HLP_DONE_DECODING();
1364 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
1365}
1366#else
1367FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1368{
1369 IEMOP_BITCH_ABOUT_STUB();
1370 return IEMOP_RAISE_INVALID_OPCODE();
1371}
1372#endif
1373
1374
1375/** Opcode 0x0f 0x01 /0. */
1376#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1377FNIEMOP_DEF(iemOp_Grp7_vmresume)
1378{
1379 IEMOP_MNEMONIC(vmresume, "vmresume");
1380 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
1381 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
1382 IEMOP_HLP_DONE_DECODING();
1383 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
1384}
1385#else
1386FNIEMOP_DEF(iemOp_Grp7_vmresume)
1387{
1388 IEMOP_BITCH_ABOUT_STUB();
1389 return IEMOP_RAISE_INVALID_OPCODE();
1390}
1391#endif
1392
1393
1394/** Opcode 0x0f 0x01 /0. */
1395#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1396FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1397{
1398 IEMOP_MNEMONIC(vmxoff, "vmxoff");
1399 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
1400 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
1401 IEMOP_HLP_DONE_DECODING();
1402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
1403}
1404#else
1405FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1406{
1407 IEMOP_BITCH_ABOUT_STUB();
1408 return IEMOP_RAISE_INVALID_OPCODE();
1409}
1410#endif
1411
1412
1413/** Opcode 0x0f 0x01 /1. */
1414FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
1415{
1416 IEMOP_MNEMONIC(sidt, "sidt Ms");
1417 IEMOP_HLP_MIN_286();
1418 IEMOP_HLP_64BIT_OP_SIZE();
1419 IEM_MC_BEGIN(2, 1);
1420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1421 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1424 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1425 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
1426 IEM_MC_END();
1427 return VINF_SUCCESS;
1428}
1429
1430
1431/** Opcode 0x0f 0x01 /1. */
1432FNIEMOP_DEF(iemOp_Grp7_monitor)
1433{
1434 IEMOP_MNEMONIC(monitor, "monitor");
1435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
1436 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
1437}
1438
1439
1440/** Opcode 0x0f 0x01 /1. */
1441FNIEMOP_DEF(iemOp_Grp7_mwait)
1442{
1443 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
1444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1445 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
1446}
1447
1448
1449/** Opcode 0x0f 0x01 /2. */
1450FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1451{
1452 IEMOP_MNEMONIC(lgdt, "lgdt");
1453 IEMOP_HLP_64BIT_OP_SIZE();
1454 IEM_MC_BEGIN(3, 1);
1455 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1456 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1457 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1460 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1461 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1462 IEM_MC_END();
1463 return VINF_SUCCESS;
1464}
1465
1466
1467/** Opcode 0x0f 0x01 0xd0. */
1468FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1469{
1470 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1471 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1472 {
1473 /** @todo r=ramshankar: We should use
1474 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1475 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1476 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
1478 }
1479 return IEMOP_RAISE_INVALID_OPCODE();
1480}
1481
1482
1483/** Opcode 0x0f 0x01 0xd1. */
1484FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1485{
1486 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1487 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1488 {
1489 /** @todo r=ramshankar: We should use
1490 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1491 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1492 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
1494 }
1495 return IEMOP_RAISE_INVALID_OPCODE();
1496}
1497
1498
1499/** Opcode 0x0f 0x01 /3. */
1500FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1501{
1502 IEMOP_MNEMONIC(lidt, "lidt");
1503 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
1504 ? IEMMODE_64BIT
1505 : pVCpu->iem.s.enmEffOpSize;
1506 IEM_MC_BEGIN(3, 1);
1507 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1508 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1509 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1512 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1513 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1514 IEM_MC_END();
1515 return VINF_SUCCESS;
1516}
1517
1518
1519/** Opcode 0x0f 0x01 0xd8. */
1520#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1521FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1522{
1523 IEMOP_MNEMONIC(vmrun, "vmrun");
1524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1525 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
1526}
1527#else
1528FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1529#endif
1530
1531/** Opcode 0x0f 0x01 0xd9. */
1532FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1533{
1534 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1536
1537 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1538 want all hypercalls regardless of instruction used, and if a
1539 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1540 (NEM/win makes ASSUMPTIONS about this behavior.) */
1541 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
1542}
1543
1544/** Opcode 0x0f 0x01 0xda. */
1545#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1546FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1547{
1548 IEMOP_MNEMONIC(vmload, "vmload");
1549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1550 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
1551}
1552#else
1553FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1554#endif
1555
1556
1557/** Opcode 0x0f 0x01 0xdb. */
1558#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1559FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1560{
1561 IEMOP_MNEMONIC(vmsave, "vmsave");
1562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1563 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
1564}
1565#else
1566FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1567#endif
1568
1569
1570/** Opcode 0x0f 0x01 0xdc. */
1571#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1572FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1573{
1574 IEMOP_MNEMONIC(stgi, "stgi");
1575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1576 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
1577}
1578#else
1579FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1580#endif
1581
1582
1583/** Opcode 0x0f 0x01 0xdd. */
1584#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1585FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1586{
1587 IEMOP_MNEMONIC(clgi, "clgi");
1588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1589 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
1590}
1591#else
1592FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1593#endif
1594
1595
1596/** Opcode 0x0f 0x01 0xdf. */
1597#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1598FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1599{
1600 IEMOP_MNEMONIC(invlpga, "invlpga");
1601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1602 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
1603}
1604#else
1605FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1606#endif
1607
1608
1609/** Opcode 0x0f 0x01 0xde. */
1610#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1611FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1612{
1613 IEMOP_MNEMONIC(skinit, "skinit");
1614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1615 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
1616}
1617#else
1618FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1619#endif
1620
1621
1622/** Opcode 0x0f 0x01 /4. */
1623FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1624{
1625 IEMOP_MNEMONIC(smsw, "smsw");
1626 IEMOP_HLP_MIN_286();
1627 if (IEM_IS_MODRM_REG_MODE(bRm))
1628 {
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1631 }
1632
1633 /* Ignore operand size here, memory refs are always 16-bit. */
1634 IEM_MC_BEGIN(2, 0);
1635 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1636 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1639 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1640 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1641 IEM_MC_END();
1642 return VINF_SUCCESS;
1643}
1644
1645
1646/** Opcode 0x0f 0x01 /6. */
1647FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1648{
1649 /* The operand size is effectively ignored, all is 16-bit and only the
1650 lower 3-bits are used. */
1651 IEMOP_MNEMONIC(lmsw, "lmsw");
1652 IEMOP_HLP_MIN_286();
1653 if (IEM_IS_MODRM_REG_MODE(bRm))
1654 {
1655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1656 IEM_MC_BEGIN(2, 0);
1657 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1658 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1659 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1660 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1661 IEM_MC_END();
1662 }
1663 else
1664 {
1665 IEM_MC_BEGIN(2, 0);
1666 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1667 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1670 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1671 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
1676
1677
1678/** Opcode 0x0f 0x01 /7. */
1679FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1680{
1681 IEMOP_MNEMONIC(invlpg, "invlpg");
1682 IEMOP_HLP_MIN_486();
1683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1684 IEM_MC_BEGIN(1, 1);
1685 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1687 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1688 IEM_MC_END();
1689 return VINF_SUCCESS;
1690}
1691
1692
1693/** Opcode 0x0f 0x01 /7. */
1694FNIEMOP_DEF(iemOp_Grp7_swapgs)
1695{
1696 IEMOP_MNEMONIC(swapgs, "swapgs");
1697 IEMOP_HLP_ONLY_64BIT();
1698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1699 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1700}
1701
1702
1703/** Opcode 0x0f 0x01 /7. */
1704FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1705{
1706 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1708 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
1709}
1710
1711
1712/**
1713 * Group 7 jump table, memory variant.
1714 */
1715IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1716{
1717 iemOp_Grp7_sgdt,
1718 iemOp_Grp7_sidt,
1719 iemOp_Grp7_lgdt,
1720 iemOp_Grp7_lidt,
1721 iemOp_Grp7_smsw,
1722 iemOp_InvalidWithRM,
1723 iemOp_Grp7_lmsw,
1724 iemOp_Grp7_invlpg
1725};
1726
1727
1728/** Opcode 0x0f 0x01. */
1729FNIEMOP_DEF(iemOp_Grp7)
1730{
1731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1732 if (IEM_IS_MODRM_MEM_MODE(bRm))
1733 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1734
1735 switch (IEM_GET_MODRM_REG_8(bRm))
1736 {
1737 case 0:
1738 switch (IEM_GET_MODRM_RM_8(bRm))
1739 {
1740 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1741 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1742 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1743 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1744 }
1745 return IEMOP_RAISE_INVALID_OPCODE();
1746
1747 case 1:
1748 switch (IEM_GET_MODRM_RM_8(bRm))
1749 {
1750 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1751 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1752 }
1753 return IEMOP_RAISE_INVALID_OPCODE();
1754
1755 case 2:
1756 switch (IEM_GET_MODRM_RM_8(bRm))
1757 {
1758 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1759 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1760 }
1761 return IEMOP_RAISE_INVALID_OPCODE();
1762
1763 case 3:
1764 switch (IEM_GET_MODRM_RM_8(bRm))
1765 {
1766 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1767 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1768 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1769 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1770 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1771 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1772 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1773 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1775 }
1776
1777 case 4:
1778 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1779
1780 case 5:
1781 return IEMOP_RAISE_INVALID_OPCODE();
1782
1783 case 6:
1784 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1785
1786 case 7:
1787 switch (IEM_GET_MODRM_RM_8(bRm))
1788 {
1789 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1790 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1791 }
1792 return IEMOP_RAISE_INVALID_OPCODE();
1793
1794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1795 }
1796}
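
/* A sketch of the ModRM fields the IEM_GET_MODRM_* macros pick apart above
   (the real macros also fold in the REX.R/REX.B extension bits, which are
   omitted here):
        uint8_t const iMod = (bRm >> 6) & 3;    // 3 = register operand, otherwise memory
        uint8_t const iReg = (bRm >> 3) & 7;    // the /0../7 opcode extension for group 7
        uint8_t const iRm  =  bRm       & 7;    // register index or addressing form
*/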
1797
1798/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
1799FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1800{
1801 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803
1804 if (IEM_IS_MODRM_REG_MODE(bRm))
1805 {
1806 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1807 switch (pVCpu->iem.s.enmEffOpSize)
1808 {
1809 case IEMMODE_16BIT:
1810 {
1811 IEM_MC_BEGIN(3, 0);
1812 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1813 IEM_MC_ARG(uint16_t, u16Sel, 1);
1814 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1815
1816 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1817 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1818 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1819
1820 IEM_MC_END();
1821 return VINF_SUCCESS;
1822 }
1823
1824 case IEMMODE_32BIT:
1825 case IEMMODE_64BIT:
1826 {
1827 IEM_MC_BEGIN(3, 0);
1828 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1829 IEM_MC_ARG(uint16_t, u16Sel, 1);
1830 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1831
1832 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1833 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1834 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1835
1836 IEM_MC_END();
1837 return VINF_SUCCESS;
1838 }
1839
1840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1841 }
1842 }
1843 else
1844 {
1845 switch (pVCpu->iem.s.enmEffOpSize)
1846 {
1847 case IEMMODE_16BIT:
1848 {
1849 IEM_MC_BEGIN(3, 1);
1850 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1851 IEM_MC_ARG(uint16_t, u16Sel, 1);
1852 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1854
1855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1856 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1857
1858 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1859 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1860 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1861
1862 IEM_MC_END();
1863 return VINF_SUCCESS;
1864 }
1865
1866 case IEMMODE_32BIT:
1867 case IEMMODE_64BIT:
1868 {
1869 IEM_MC_BEGIN(3, 1);
1870 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1871 IEM_MC_ARG(uint16_t, u16Sel, 1);
1872 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1874
1875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1876 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1877/** @todo testcase: make sure it's a 16-bit read. */
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1888 }
1889 }
1890}
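
/* Rough sketch of what the iemCImpl_LarLsl_uXX workers compute on a
   successful selector check (simplified; the C implementation is the
   authoritative source for the access rules):
        if (fIsLar)
            *puDst = uDescDword2 & (f16BitOp ? UINT32_C(0x0000ff00) : UINT32_C(0x00ffff00));
        else
            *puDst = cbLimit;   // already scaled up for page-granular descriptors
   EFLAGS.ZF is set on success; on failure only ZF is cleared and the
   destination register is left untouched.  (uDescDword2, f16BitOp and
   cbLimit are illustrative names, not locals from this file.) */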
1891
1892
1893
1894/** Opcode 0x0f 0x02. */
1895FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1896{
1897 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1898 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1899}
1900
1901
1902/** Opcode 0x0f 0x03. */
1903FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1904{
1905 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1906 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1907}
1908
1909
1910/** Opcode 0x0f 0x05. */
1911FNIEMOP_DEF(iemOp_syscall)
1912{
1913 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1916}
1917
1918
1919/** Opcode 0x0f 0x06. */
1920FNIEMOP_DEF(iemOp_clts)
1921{
1922 IEMOP_MNEMONIC(clts, "clts");
1923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1924 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1925}
1926
1927
1928/** Opcode 0x0f 0x07. */
1929FNIEMOP_DEF(iemOp_sysret)
1930{
1931 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1933 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1934}
1935
1936
1937/** Opcode 0x0f 0x08. */
1938FNIEMOP_DEF(iemOp_invd)
1939{
1940 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1941 IEMOP_HLP_MIN_486();
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1944}
1945
1946
1947/** Opcode 0x0f 0x09. */
1948FNIEMOP_DEF(iemOp_wbinvd)
1949{
1950 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1951 IEMOP_HLP_MIN_486();
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1954}
1955
1956
1957/** Opcode 0x0f 0x0b. */
1958FNIEMOP_DEF(iemOp_ud2)
1959{
1960 IEMOP_MNEMONIC(ud2, "ud2");
1961 return IEMOP_RAISE_INVALID_OPCODE();
1962}
1963
1964/** Opcode 0x0f 0x0d. */
1965FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1966{
1967 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1968 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1969 {
1970 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1971 return IEMOP_RAISE_INVALID_OPCODE();
1972 }
1973
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 if (IEM_IS_MODRM_REG_MODE(bRm))
1976 {
1977 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1978 return IEMOP_RAISE_INVALID_OPCODE();
1979 }
1980
1981 switch (IEM_GET_MODRM_REG_8(bRm))
1982 {
1983 case 2: /* Aliased to /0 for the time being. */
1984 case 4: /* Aliased to /0 for the time being. */
1985 case 5: /* Aliased to /0 for the time being. */
1986 case 6: /* Aliased to /0 for the time being. */
1987 case 7: /* Aliased to /0 for the time being. */
1988 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1989 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1990 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1992 }
1993
1994 IEM_MC_BEGIN(0, 1);
1995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 /* Currently a NOP. */
1999 NOREF(GCPtrEffSrc);
2000 IEM_MC_ADVANCE_RIP();
2001 IEM_MC_END();
2002 return VINF_SUCCESS;
2003}
2004
2005
2006/** Opcode 0x0f 0x0e. */
2007FNIEMOP_DEF(iemOp_femms)
2008{
2009 IEMOP_MNEMONIC(femms, "femms");
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011
2012 IEM_MC_BEGIN(0,0);
2013 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2014 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2015 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2016 IEM_MC_FPU_FROM_MMX_MODE();
2017 IEM_MC_ADVANCE_RIP();
2018 IEM_MC_END();
2019 return VINF_SUCCESS;
2020}
2021
2022
2023/** Opcode 0x0f 0x0f. */
2024FNIEMOP_DEF(iemOp_3Dnow)
2025{
2026 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2027 {
2028 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2029 return IEMOP_RAISE_INVALID_OPCODE();
2030 }
2031
2032#ifdef IEM_WITH_3DNOW
2033 /* This is pretty sparse, use switch instead of table. */
2034 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2035 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2036#else
2037 IEMOP_BITCH_ABOUT_STUB();
2038 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2039#endif
2040}
2041
2042
2043/**
2044 * @opcode 0x10
2045 * @oppfx none
2046 * @opcpuid sse
2047 * @opgroup og_sse_simdfp_datamove
2048 * @opxcpttype 4UA
2049 * @optest op1=1 op2=2 -> op1=2
2050 * @optest op1=0 op2=-22 -> op1=-22
2051 */
2052FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2053{
2054 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if (IEM_IS_MODRM_REG_MODE(bRm))
2057 {
2058 /*
2059 * Register, register.
2060 */
2061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2062 IEM_MC_BEGIN(0, 0);
2063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2065 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2066 IEM_GET_MODRM_RM(pVCpu, bRm));
2067 IEM_MC_ADVANCE_RIP();
2068 IEM_MC_END();
2069 }
2070 else
2071 {
2072 /*
2073          * Register, memory.
2074 */
2075 IEM_MC_BEGIN(0, 2);
2076 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2081 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP();
2088 IEM_MC_END();
2089 }
2090 return VINF_SUCCESS;
2091
2092}
2093
2094
2095/**
2096 * @opcode 0x10
2097 * @oppfx 0x66
2098 * @opcpuid sse2
2099 * @opgroup og_sse2_pcksclr_datamove
2100 * @opxcpttype 4UA
2101 * @optest op1=1 op2=2 -> op1=2
2102 * @optest op1=0 op2=-42 -> op1=-42
2103 */
2104FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2105{
2106 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2108 if (IEM_IS_MODRM_REG_MODE(bRm))
2109 {
2110 /*
2111 * Register, register.
2112 */
2113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2114 IEM_MC_BEGIN(0, 0);
2115 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2117 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2118 IEM_GET_MODRM_RM(pVCpu, bRm));
2119 IEM_MC_ADVANCE_RIP();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125          * Register, memory.
2126 */
2127 IEM_MC_BEGIN(0, 2);
2128 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP();
2140 IEM_MC_END();
2141 }
2142 return VINF_SUCCESS;
2143}
2144
2145
2146/**
2147 * @opcode 0x10
2148 * @oppfx 0xf3
2149 * @opcpuid sse
2150 * @opgroup og_sse_simdfp_datamove
2151 * @opxcpttype 5
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-22 -> op1=-22
2154 */
2155FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2156{
2157 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2159 if (IEM_IS_MODRM_REG_MODE(bRm))
2160 {
2161 /*
2162 * Register, register.
2163 */
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 IEM_MC_BEGIN(0, 1);
2166 IEM_MC_LOCAL(uint32_t, uSrc);
2167
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2170 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2171 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2172
2173 IEM_MC_ADVANCE_RIP();
2174 IEM_MC_END();
2175 }
2176 else
2177 {
2178 /*
2179          * Register, memory.
2180 */
2181 IEM_MC_BEGIN(0, 2);
2182 IEM_MC_LOCAL(uint32_t, uSrc);
2183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2184
2185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2188 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2189
2190 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2191 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2192
2193 IEM_MC_ADVANCE_RIP();
2194 IEM_MC_END();
2195 }
2196 return VINF_SUCCESS;
2197}
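
/* Semantics sketch for the two movss forms above: the register form only
   replaces the low dword, while the memory form zero-extends through the
   whole register (hence the VssZx decoding annotation):
        // xmm1, xmm2:  au32[1..3] of the destination are preserved.
        puDst->au32[0] = puSrc->au32[0];
        // xmm1, m32:   the rest of the register is zeroed.
        puDst->au32[0] = u32Mem;
        puDst->au32[1] = puDst->au32[2] = puDst->au32[3] = 0;
   movsd (below) behaves the same at qword granularity.  (puDst, puSrc and
   u32Mem are illustrative names, not locals from this file.) */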
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf2
2203 * @opcpuid sse2
2204 * @opgroup og_sse2_pcksclr_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-42 -> op1=-42
2208 */
2209FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * Register, register.
2217 */
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_BEGIN(0, 1);
2220 IEM_MC_LOCAL(uint64_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2225 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2226
2227 IEM_MC_ADVANCE_RIP();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233          * Register, memory.
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint64_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP();
2248 IEM_MC_END();
2249 }
2250 return VINF_SUCCESS;
2251}
2252
2253
2254/**
2255 * @opcode 0x11
2256 * @oppfx none
2257 * @opcpuid sse
2258 * @opgroup og_sse_simdfp_datamove
2259 * @opxcpttype 4UA
2260 * @optest op1=1 op2=2 -> op1=2
2261 * @optest op1=0 op2=-42 -> op1=-42
2262 */
2263FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2264{
2265 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2267 if (IEM_IS_MODRM_REG_MODE(bRm))
2268 {
2269 /*
2270 * Register, register.
2271 */
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2273 IEM_MC_BEGIN(0, 0);
2274 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2276 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2277 IEM_GET_MODRM_REG(pVCpu, bRm));
2278 IEM_MC_ADVANCE_RIP();
2279 IEM_MC_END();
2280 }
2281 else
2282 {
2283 /*
2284 * Memory, register.
2285 */
2286 IEM_MC_BEGIN(0, 2);
2287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2289
2290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2294
2295 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2297
2298 IEM_MC_ADVANCE_RIP();
2299 IEM_MC_END();
2300 }
2301 return VINF_SUCCESS;
2302}
2303
2304
2305/**
2306 * @opcode 0x11
2307 * @oppfx 0x66
2308 * @opcpuid sse2
2309 * @opgroup og_sse2_pcksclr_datamove
2310 * @opxcpttype 4UA
2311 * @optest op1=1 op2=2 -> op1=2
2312 * @optest op1=0 op2=-42 -> op1=-42
2313 */
2314FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2315{
2316 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2318 if (IEM_IS_MODRM_REG_MODE(bRm))
2319 {
2320 /*
2321 * Register, register.
2322 */
2323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2324 IEM_MC_BEGIN(0, 0);
2325 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2326 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2327 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2328 IEM_GET_MODRM_REG(pVCpu, bRm));
2329 IEM_MC_ADVANCE_RIP();
2330 IEM_MC_END();
2331 }
2332 else
2333 {
2334 /*
2335 * Memory, register.
2336 */
2337 IEM_MC_BEGIN(0, 2);
2338 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2340
2341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2343 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2344 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2345
2346 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2347 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2348
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 return VINF_SUCCESS;
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0xf3
2359 * @opcpuid sse
2360 * @opgroup og_sse_simdfp_datamove
2361 * @opxcpttype 5
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-22 -> op1=-22
2364 */
2365FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * Register, register.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 1);
2376 IEM_MC_LOCAL(uint32_t, uSrc);
2377
2378 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2381 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2382
2383 IEM_MC_ADVANCE_RIP();
2384 IEM_MC_END();
2385 }
2386 else
2387 {
2388 /*
2389 * Memory, register.
2390 */
2391 IEM_MC_BEGIN(0, 2);
2392 IEM_MC_LOCAL(uint32_t, uSrc);
2393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2394
2395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2399
2400 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2401 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2402
2403 IEM_MC_ADVANCE_RIP();
2404 IEM_MC_END();
2405 }
2406 return VINF_SUCCESS;
2407}
2408
2409
2410/**
2411 * @opcode 0x11
2412 * @oppfx 0xf2
2413 * @opcpuid sse2
2414 * @opgroup og_sse2_pcksclr_datamove
2415 * @opxcpttype 5
2416 * @optest op1=1 op2=2 -> op1=2
2417 * @optest op1=0 op2=-42 -> op1=-42
2418 */
2419FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2420{
2421 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2423 if (IEM_IS_MODRM_REG_MODE(bRm))
2424 {
2425 /*
2426 * Register, register.
2427 */
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 IEM_MC_BEGIN(0, 1);
2430 IEM_MC_LOCAL(uint64_t, uSrc);
2431
2432 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2434 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2435 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 else
2441 {
2442 /*
2443 * Memory, register.
2444 */
2445 IEM_MC_BEGIN(0, 2);
2446 IEM_MC_LOCAL(uint64_t, uSrc);
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2453
2454 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2455 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 return VINF_SUCCESS;
2461}
2462
2463
2464FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2465{
2466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2467 if (IEM_IS_MODRM_REG_MODE(bRm))
2468 {
2469 /**
2470 * @opcode 0x12
2471 * @opcodesub 11 mr/reg
2472 * @oppfx none
2473 * @opcpuid sse
2474 * @opgroup og_sse_simdfp_datamove
2475 * @opxcpttype 5
2476 * @optest op1=1 op2=2 -> op1=2
2477 * @optest op1=0 op2=-42 -> op1=-42
2478 */
2479 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2480
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2482 IEM_MC_BEGIN(0, 1);
2483 IEM_MC_LOCAL(uint64_t, uSrc);
2484
2485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2487 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2488 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2489
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /**
2496 * @opdone
2497 * @opcode 0x12
2498 * @opcodesub !11 mr/reg
2499 * @oppfx none
2500 * @opcpuid sse
2501 * @opgroup og_sse_simdfp_datamove
2502 * @opxcpttype 5
2503 * @optest op1=1 op2=2 -> op1=2
2504 * @optest op1=0 op2=-42 -> op1=-42
2505     * @opfunction iemOp_movlps_Vq_Mq__movhlps
2506 */
2507        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2508
2509 IEM_MC_BEGIN(0, 2);
2510 IEM_MC_LOCAL(uint64_t, uSrc);
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2517
2518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2520
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 return VINF_SUCCESS;
2525}
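
/* Semantics sketch for the two forms decoded above:
        // movhlps xmm1, xmm2 (register form): high qword of the source goes
        // into the low qword of the destination; au64[1] is preserved.
        puDst->au64[0] = puSrc->au64[1];
        // movlps xmm1, m64 (memory form): loads the low qword only.
        puDst->au64[0] = u64Mem;
   (puDst, puSrc and u64Mem are illustrative names, not locals from this file.) */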
2526
2527
2528/**
2529 * @opcode 0x12
2530 * @opcodesub !11 mr/reg
2531 * @oppfx 0x66
2532 * @opcpuid sse2
2533 * @opgroup og_sse2_pcksclr_datamove
2534 * @opxcpttype 5
2535 * @optest op1=1 op2=2 -> op1=2
2536 * @optest op1=0 op2=-42 -> op1=-42
2537 */
2538FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2539{
2540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2541 if (IEM_IS_MODRM_MEM_MODE(bRm))
2542 {
2543 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2544
2545 IEM_MC_BEGIN(0, 2);
2546 IEM_MC_LOCAL(uint64_t, uSrc);
2547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2548
2549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2551 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2553
2554 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2555 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2556
2557 IEM_MC_ADVANCE_RIP();
2558 IEM_MC_END();
2559 return VINF_SUCCESS;
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 return IEMOP_RAISE_INVALID_OPCODE();
2573}
2574
2575
2576/**
2577 * @opcode 0x12
2578 * @oppfx 0xf3
2579 * @opcpuid sse3
2580 * @opgroup og_sse3_pcksclr_datamove
2581 * @opxcpttype 4
2582 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2583 * op1=0x00000002000000020000000100000001
2584 */
2585FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2586{
2587 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2589 if (IEM_IS_MODRM_REG_MODE(bRm))
2590 {
2591 /*
2592 * Register, register.
2593 */
2594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2595 IEM_MC_BEGIN(2, 0);
2596 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2597 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2598
2599 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2603 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2604 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2605
2606 IEM_MC_ADVANCE_RIP();
2607 IEM_MC_END();
2608 }
2609 else
2610 {
2611 /*
2612 * Register, memory.
2613 */
2614 IEM_MC_BEGIN(2, 2);
2615 IEM_MC_LOCAL(RTUINT128U, uSrc);
2616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2622 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2627 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2628
2629 IEM_MC_ADVANCE_RIP();
2630 IEM_MC_END();
2631 }
2632 return VINF_SUCCESS;
2633}
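
/* The @optest above follows directly from the movsldup semantics, which
   duplicate the even dword elements.  Sketch of what a worker like
   iemAImpl_movsldup computes (this write order is safe even when the two
   operands alias):
        puDst->au32[0] = puSrc->au32[0];
        puDst->au32[1] = puSrc->au32[0];
        puDst->au32[2] = puSrc->au32[2];
        puDst->au32[3] = puSrc->au32[2];
*/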
2634
2635
2636/**
2637 * @opcode 0x12
2638 * @oppfx 0xf2
2639 * @opcpuid sse3
2640 * @opgroup og_sse3_pcksclr_datamove
2641 * @opxcpttype 5
2642 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2643 * op1=0x22222222111111112222222211111111
2644 */
2645FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2646{
2647 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2649 if (IEM_IS_MODRM_REG_MODE(bRm))
2650 {
2651 /*
2652 * Register, register.
2653 */
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2655 IEM_MC_BEGIN(2, 0);
2656 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2657 IEM_MC_ARG(uint64_t, uSrc, 1);
2658
2659 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2663 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2664 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * Register, memory.
2673 */
2674 IEM_MC_BEGIN(2, 2);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2677 IEM_MC_ARG(uint64_t, uSrc, 1);
2678
2679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2681 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2682 IEM_MC_PREPARE_SSE_USAGE();
2683
2684 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2685 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2686 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2687
2688 IEM_MC_ADVANCE_RIP();
2689 IEM_MC_END();
2690 }
2691 return VINF_SUCCESS;
2692}
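
/* movddup simply broadcasts the low qword, which is why the register path
   above fetches a uint64_t instead of referencing the whole XMM source:
        puDst->au64[0] = uSrc;
        puDst->au64[1] = uSrc;
*/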
2693
2694
2695/**
2696 * @opcode 0x13
2697 * @opcodesub !11 mr/reg
2698 * @oppfx none
2699 * @opcpuid sse
2700 * @opgroup og_sse_simdfp_datamove
2701 * @opxcpttype 5
2702 * @optest op1=1 op2=2 -> op1=2
2703 * @optest op1=0 op2=-42 -> op1=-42
2704 */
2705FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2706{
2707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2708 if (IEM_IS_MODRM_MEM_MODE(bRm))
2709 {
2710 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2711
2712 IEM_MC_BEGIN(0, 2);
2713 IEM_MC_LOCAL(uint64_t, uSrc);
2714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2715
2716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2718 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2719 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2720
2721 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2722 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2723
2724 IEM_MC_ADVANCE_RIP();
2725 IEM_MC_END();
2726 return VINF_SUCCESS;
2727 }
2728
2729 /**
2730 * @opdone
2731 * @opmnemonic ud0f13m3
2732 * @opcode 0x13
2733 * @opcodesub 11 mr/reg
2734 * @oppfx none
2735 * @opunused immediate
2736 * @opcpuid sse
2737 * @optest ->
2738 */
2739 return IEMOP_RAISE_INVALID_OPCODE();
2740}
2741
2742
2743/**
2744 * @opcode 0x13
2745 * @opcodesub !11 mr/reg
2746 * @oppfx 0x66
2747 * @opcpuid sse2
2748 * @opgroup og_sse2_pcksclr_datamove
2749 * @opxcpttype 5
2750 * @optest op1=1 op2=2 -> op1=2
2751 * @optest op1=0 op2=-42 -> op1=-42
2752 */
2753FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2754{
2755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2756 if (IEM_IS_MODRM_MEM_MODE(bRm))
2757 {
2758 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2759 IEM_MC_BEGIN(0, 2);
2760 IEM_MC_LOCAL(uint64_t, uSrc);
2761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2762
2763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2765 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2766 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2767
2768 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2769 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2770
2771 IEM_MC_ADVANCE_RIP();
2772 IEM_MC_END();
2773 return VINF_SUCCESS;
2774 }
2775
2776 /**
2777 * @opdone
2778 * @opmnemonic ud660f13m3
2779 * @opcode 0x13
2780 * @opcodesub 11 mr/reg
2781 * @oppfx 0x66
2782 * @opunused immediate
2783 * @opcpuid sse
2784 * @optest ->
2785 */
2786 return IEMOP_RAISE_INVALID_OPCODE();
2787}
2788
2789
2790/**
2791 * @opmnemonic udf30f13
2792 * @opcode 0x13
2793 * @oppfx 0xf3
2794 * @opunused intel-modrm
2795 * @opcpuid sse
2796 * @optest ->
2797 * @opdone
2798 */
2799
2800/**
2801 * @opmnemonic udf20f13
2802 * @opcode 0x13
2803 * @oppfx 0xf2
2804 * @opunused intel-modrm
2805 * @opcpuid sse
2806 * @optest ->
2807 * @opdone
2808 */
2809
2810/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2811FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2812{
2813 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2814 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2815}
2816
2817
2818/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2819FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2820{
2821 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2822 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2823}
2824
2825
2826/**
2827 * @opdone
2828 * @opmnemonic udf30f14
2829 * @opcode 0x14
2830 * @oppfx 0xf3
2831 * @opunused intel-modrm
2832 * @opcpuid sse
2833 * @optest ->
2834 * @opdone
2835 */
2836
2837/**
2838 * @opmnemonic udf20f14
2839 * @opcode 0x14
2840 * @oppfx 0xf2
2841 * @opunused intel-modrm
2842 * @opcpuid sse
2843 * @optest ->
2844 * @opdone
2845 */
2846
2847/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2848FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2849{
2850 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2851 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2852}
2853
2854
2855/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2856FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2857{
2858 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2859 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2860}
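
/* Sketch of the four unpack flavours handled above, with uDst/uSrc denoting
   the input values (the real workers buffer them, since the operands may
   alias):
        // unpcklps: interleave the low halves.
        puDst->au32[0] = uDst.au32[0];  puDst->au32[1] = uSrc.au32[0];
        puDst->au32[2] = uDst.au32[1];  puDst->au32[3] = uSrc.au32[1];
        // unpckhps: interleave the high halves.
        puDst->au32[0] = uDst.au32[2];  puDst->au32[1] = uSrc.au32[2];
        puDst->au32[2] = uDst.au32[3];  puDst->au32[3] = uSrc.au32[3];
        // unpcklpd / unpckhpd: the same pattern with au64[0] / au64[1].
*/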
2861
2862
2863/* Opcode 0xf3 0x0f 0x15 - invalid */
2864/* Opcode 0xf2 0x0f 0x15 - invalid */
2865
2866/**
2867 * @opdone
2868 * @opmnemonic udf30f15
2869 * @opcode 0x15
2870 * @oppfx 0xf3
2871 * @opunused intel-modrm
2872 * @opcpuid sse
2873 * @optest ->
2874 * @opdone
2875 */
2876
2877/**
2878 * @opmnemonic udf20f15
2879 * @opcode 0x15
2880 * @oppfx 0xf2
2881 * @opunused intel-modrm
2882 * @opcpuid sse
2883 * @optest ->
2884 * @opdone
2885 */
2886
2887FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2888{
2889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2890 if (IEM_IS_MODRM_REG_MODE(bRm))
2891 {
2892 /**
2893 * @opcode 0x16
2894 * @opcodesub 11 mr/reg
2895 * @oppfx none
2896 * @opcpuid sse
2897 * @opgroup og_sse_simdfp_datamove
2898 * @opxcpttype 5
2899 * @optest op1=1 op2=2 -> op1=2
2900 * @optest op1=0 op2=-42 -> op1=-42
2901 */
2902 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2903
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2905 IEM_MC_BEGIN(0, 1);
2906 IEM_MC_LOCAL(uint64_t, uSrc);
2907
2908 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2909 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2910 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2911 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 else
2917 {
2918 /**
2919 * @opdone
2920 * @opcode 0x16
2921 * @opcodesub !11 mr/reg
2922 * @oppfx none
2923 * @opcpuid sse
2924 * @opgroup og_sse_simdfp_datamove
2925 * @opxcpttype 5
2926 * @optest op1=1 op2=2 -> op1=2
2927 * @optest op1=0 op2=-42 -> op1=-42
2928 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2929 */
2930 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2931
2932 IEM_MC_BEGIN(0, 2);
2933 IEM_MC_LOCAL(uint64_t, uSrc);
2934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2935
2936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2939 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2940
2941 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2942 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2943
2944 IEM_MC_ADVANCE_RIP();
2945 IEM_MC_END();
2946 }
2947 return VINF_SUCCESS;
2948}
2949
2950
2951/**
2952 * @opcode 0x16
2953 * @opcodesub !11 mr/reg
2954 * @oppfx 0x66
2955 * @opcpuid sse2
2956 * @opgroup og_sse2_pcksclr_datamove
2957 * @opxcpttype 5
2958 * @optest op1=1 op2=2 -> op1=2
2959 * @optest op1=0 op2=-42 -> op1=-42
2960 */
2961FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2962{
2963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2964 if (IEM_IS_MODRM_MEM_MODE(bRm))
2965 {
2966 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2967 IEM_MC_BEGIN(0, 2);
2968 IEM_MC_LOCAL(uint64_t, uSrc);
2969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2970
2971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2975
2976 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2977 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2978
2979 IEM_MC_ADVANCE_RIP();
2980 IEM_MC_END();
2981 return VINF_SUCCESS;
2982 }
2983
2984 /**
2985 * @opdone
2986 * @opmnemonic ud660f16m3
2987 * @opcode 0x16
2988 * @opcodesub 11 mr/reg
2989 * @oppfx 0x66
2990 * @opunused immediate
2991 * @opcpuid sse
2992 * @optest ->
2993 */
2994 return IEMOP_RAISE_INVALID_OPCODE();
2995}
2996
2997
2998/**
2999 * @opcode 0x16
3000 * @oppfx 0xf3
3001 * @opcpuid sse3
3002 * @opgroup og_sse3_pcksclr_datamove
3003 * @opxcpttype 4
3004 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3005 * op1=0x00000002000000020000000100000001
3006 */
3007FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3008{
3009 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3011 if (IEM_IS_MODRM_REG_MODE(bRm))
3012 {
3013 /*
3014 * Register, register.
3015 */
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3017 IEM_MC_BEGIN(2, 0);
3018 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3019 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3020
3021 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3022 IEM_MC_PREPARE_SSE_USAGE();
3023
3024 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3025 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3026 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3027
3028 IEM_MC_ADVANCE_RIP();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /*
3034 * Register, memory.
3035 */
3036 IEM_MC_BEGIN(2, 2);
3037 IEM_MC_LOCAL(RTUINT128U, uSrc);
3038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3041
3042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3044 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3045 IEM_MC_PREPARE_SSE_USAGE();
3046
3047 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3048 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3049 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3050
3051 IEM_MC_ADVANCE_RIP();
3052 IEM_MC_END();
3053 }
3054 return VINF_SUCCESS;
3055}
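
/* movshdup mirrors movsldup but duplicates the odd dword elements, matching
   the @optest above:
        puDst->au32[0] = puSrc->au32[1];
        puDst->au32[1] = puSrc->au32[1];
        puDst->au32[2] = puSrc->au32[3];
        puDst->au32[3] = puSrc->au32[3];
*/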
3056
3057/**
3058 * @opdone
3059 * @opmnemonic udf20f16
3060 * @opcode 0x16
3061 * @oppfx 0xf2
3062 * @opunused intel-modrm
3063 * @opcpuid sse
3064 * @optest ->
3065 * @opdone
3066 */
3067
3068
3069/**
3070 * @opcode 0x17
3071 * @opcodesub !11 mr/reg
3072 * @oppfx none
3073 * @opcpuid sse
3074 * @opgroup og_sse_simdfp_datamove
3075 * @opxcpttype 5
3076 * @optest op1=1 op2=2 -> op1=2
3077 * @optest op1=0 op2=-42 -> op1=-42
3078 */
3079FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3080{
3081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3082 if (IEM_IS_MODRM_MEM_MODE(bRm))
3083 {
3084 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3085
3086 IEM_MC_BEGIN(0, 2);
3087 IEM_MC_LOCAL(uint64_t, uSrc);
3088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3089
3090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3092 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3093 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3094
3095 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3096 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3097
3098 IEM_MC_ADVANCE_RIP();
3099 IEM_MC_END();
3100 return VINF_SUCCESS;
3101 }
3102
3103 /**
3104 * @opdone
3105 * @opmnemonic ud0f17m3
3106 * @opcode 0x17
3107 * @opcodesub 11 mr/reg
3108 * @oppfx none
3109 * @opunused immediate
3110 * @opcpuid sse
3111 * @optest ->
3112 */
3113 return IEMOP_RAISE_INVALID_OPCODE();
3114}
3115
3116
3117/**
3118 * @opcode 0x17
3119 * @opcodesub !11 mr/reg
3120 * @oppfx 0x66
3121 * @opcpuid sse2
3122 * @opgroup og_sse2_pcksclr_datamove
3123 * @opxcpttype 5
3124 * @optest op1=1 op2=2 -> op1=2
3125 * @optest op1=0 op2=-42 -> op1=-42
3126 */
3127FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3128{
3129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3130 if (IEM_IS_MODRM_MEM_MODE(bRm))
3131 {
3132 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3133
3134 IEM_MC_BEGIN(0, 2);
3135 IEM_MC_LOCAL(uint64_t, uSrc);
3136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3137
3138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3140        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3141 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3142
3143 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3144 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3145
3146 IEM_MC_ADVANCE_RIP();
3147 IEM_MC_END();
3148 return VINF_SUCCESS;
3149 }
3150
3151 /**
3152 * @opdone
3153 * @opmnemonic ud660f17m3
3154 * @opcode 0x17
3155 * @opcodesub 11 mr/reg
3156 * @oppfx 0x66
3157 * @opunused immediate
3158 * @opcpuid sse
3159 * @optest ->
3160 */
3161 return IEMOP_RAISE_INVALID_OPCODE();
3162}
3163
3164
3165/**
3166 * @opdone
3167 * @opmnemonic udf30f17
3168 * @opcode 0x17
3169 * @oppfx 0xf3
3170 * @opunused intel-modrm
3171 * @opcpuid sse
3172 * @optest ->
3173 * @opdone
3174 */
3175
3176/**
3177 * @opmnemonic udf20f17
3178 * @opcode 0x17
3179 * @oppfx 0xf2
3180 * @opunused intel-modrm
3181 * @opcpuid sse
3182 * @optest ->
3183 * @opdone
3184 */
3185
3186
3187/** Opcode 0x0f 0x18. */
3188FNIEMOP_DEF(iemOp_prefetch_Grp16)
3189{
3190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3191 if (IEM_IS_MODRM_MEM_MODE(bRm))
3192 {
3193 switch (IEM_GET_MODRM_REG_8(bRm))
3194 {
3195 case 4: /* Aliased to /0 for the time being according to AMD. */
3196 case 5: /* Aliased to /0 for the time being according to AMD. */
3197 case 6: /* Aliased to /0 for the time being according to AMD. */
3198 case 7: /* Aliased to /0 for the time being according to AMD. */
3199 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3200 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3201 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3202 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3204 }
3205
3206 IEM_MC_BEGIN(0, 1);
3207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3210 /* Currently a NOP. */
3211 NOREF(GCPtrEffSrc);
3212 IEM_MC_ADVANCE_RIP();
3213 IEM_MC_END();
3214 return VINF_SUCCESS;
3215 }
3216
3217 return IEMOP_RAISE_INVALID_OPCODE();
3218}
3219
3220
3221/** Opcode 0x0f 0x19..0x1f. */
3222FNIEMOP_DEF(iemOp_nop_Ev)
3223{
3224 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3226 if (IEM_IS_MODRM_REG_MODE(bRm))
3227 {
3228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3229 IEM_MC_BEGIN(0, 0);
3230 IEM_MC_ADVANCE_RIP();
3231 IEM_MC_END();
3232 }
3233 else
3234 {
3235 IEM_MC_BEGIN(0, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3239 /* Currently a NOP. */
3240 NOREF(GCPtrEffSrc);
3241 IEM_MC_ADVANCE_RIP();
3242 IEM_MC_END();
3243 }
3244 return VINF_SUCCESS;
3245}
3246
3247
3248/** Opcode 0x0f 0x20. */
3249FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3250{
3251    /* mod is ignored, as are operand size overrides. */
3252 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3253 IEMOP_HLP_MIN_386();
3254 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3255 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3256 else
3257 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3258
3259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3260 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3261 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3262 {
3263 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3264 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3265 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3266 iCrReg |= 8;
3267 }
3268 switch (iCrReg)
3269 {
3270 case 0: case 2: case 3: case 4: case 8:
3271 break;
3272 default:
3273 return IEMOP_RAISE_INVALID_OPCODE();
3274 }
3275 IEMOP_HLP_DONE_DECODING();
3276
3277 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3278}
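
/* On CPUs with the fMovCr8In32Bit feature (AMD), the lock prefix handled
   above turns the CR0 encoding into a CR8 access from outside 64-bit mode.
   Illustrative byte sequences:
        0f 20 c0            // mov eax, cr0
        f0 0f 20 c0         // mov eax, cr8 (lock sets bit 3 of the CR index)
*/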
3279
3280
3281/** Opcode 0x0f 0x21. */
3282FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3283{
3284 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3285 IEMOP_HLP_MIN_386();
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3288 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3289 return IEMOP_RAISE_INVALID_OPCODE();
3290 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3291 IEM_GET_MODRM_RM(pVCpu, bRm),
3292 IEM_GET_MODRM_REG_8(bRm));
3293}
3294
3295
3296/** Opcode 0x0f 0x22. */
3297FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3298{
3299    /* mod is ignored, as are operand size overrides. */
3300 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3301 IEMOP_HLP_MIN_386();
3302 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3303 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3304 else
3305 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3306
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3310 {
3311 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3312 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3313 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3314 iCrReg |= 8;
3315 }
3316 switch (iCrReg)
3317 {
3318 case 0: case 2: case 3: case 4: case 8:
3319 break;
3320 default:
3321 return IEMOP_RAISE_INVALID_OPCODE();
3322 }
3323 IEMOP_HLP_DONE_DECODING();
3324
3325 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3326}
3327
3328
3329/** Opcode 0x0f 0x23. */
3330FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3331{
3332 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3333 IEMOP_HLP_MIN_386();
3334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3337 return IEMOP_RAISE_INVALID_OPCODE();
3338 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3339 IEM_GET_MODRM_REG_8(bRm),
3340 IEM_GET_MODRM_RM(pVCpu, bRm));
3341}
3342
3343
3344/** Opcode 0x0f 0x24. */
3345FNIEMOP_DEF(iemOp_mov_Rd_Td)
3346{
3347 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3348 IEMOP_HLP_MIN_386();
3349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3352 return IEMOP_RAISE_INVALID_OPCODE();
3353 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3354 IEM_GET_MODRM_RM(pVCpu, bRm),
3355 IEM_GET_MODRM_REG_8(bRm));
3356}
3357
3358
3359/** Opcode 0x0f 0x26. */
3360FNIEMOP_DEF(iemOp_mov_Td_Rd)
3361{
3362 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3363 IEMOP_HLP_MIN_386();
3364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3366 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3367 return IEMOP_RAISE_INVALID_OPCODE();
3368 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3369 IEM_GET_MODRM_REG_8(bRm),
3370 IEM_GET_MODRM_RM(pVCpu, bRm));
3371}
3372
3373
3374/**
3375 * @opcode 0x28
3376 * @oppfx none
3377 * @opcpuid sse
3378 * @opgroup og_sse_simdfp_datamove
3379 * @opxcpttype 1
3380 * @optest op1=1 op2=2 -> op1=2
3381 * @optest op1=0 op2=-42 -> op1=-42
3382 */
3383FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3384{
3385 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3387 if (IEM_IS_MODRM_REG_MODE(bRm))
3388 {
3389 /*
3390 * Register, register.
3391 */
3392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3393 IEM_MC_BEGIN(0, 0);
3394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3396 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3397 IEM_GET_MODRM_RM(pVCpu, bRm));
3398 IEM_MC_ADVANCE_RIP();
3399 IEM_MC_END();
3400 }
3401 else
3402 {
3403 /*
3404 * Register, memory.
3405 */
3406 IEM_MC_BEGIN(0, 2);
3407 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409
3410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3412 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3413 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3414
3415 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3416 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3417
3418 IEM_MC_ADVANCE_RIP();
3419 IEM_MC_END();
3420 }
3421 return VINF_SUCCESS;
3422}
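
/* Unlike the movups path, the aligned fetch above is expected to fault on a
   misaligned operand.  Conceptually (a sketch, not the real expansion of
   IEM_MC_FETCH_MEM_U128_ALIGN_SSE):
        if (GCPtrEffSrc & 15)
            return iemRaiseGeneralProtectionFault0(pVCpu);  // #GP(0) for movaps
*/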
3423
3424/**
3425 * @opcode 0x28
3426 * @oppfx 66
3427 * @opcpuid sse2
3428 * @opgroup og_sse2_pcksclr_datamove
3429 * @opxcpttype 1
3430 * @optest op1=1 op2=2 -> op1=2
3431 * @optest op1=0 op2=-42 -> op1=-42
3432 */
3433FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3434{
3435 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3437 if (IEM_IS_MODRM_REG_MODE(bRm))
3438 {
3439 /*
3440 * Register, register.
3441 */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 0);
3444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3446 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3447 IEM_GET_MODRM_RM(pVCpu, bRm));
3448 IEM_MC_ADVANCE_RIP();
3449 IEM_MC_END();
3450 }
3451 else
3452 {
3453 /*
3454 * Register, memory.
3455 */
3456 IEM_MC_BEGIN(0, 2);
3457 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3464
3465 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3466 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3467
3468 IEM_MC_ADVANCE_RIP();
3469 IEM_MC_END();
3470 }
3471 return VINF_SUCCESS;
3472}
3473
3474/* Opcode 0xf3 0x0f 0x28 - invalid */
3475/* Opcode 0xf2 0x0f 0x28 - invalid */
3476
3477/**
3478 * @opcode 0x29
3479 * @oppfx none
3480 * @opcpuid sse
3481 * @opgroup og_sse_simdfp_datamove
3482 * @opxcpttype 1
3483 * @optest op1=1 op2=2 -> op1=2
3484 * @optest op1=0 op2=-42 -> op1=-42
3485 */
3486FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3487{
3488 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 if (IEM_IS_MODRM_REG_MODE(bRm))
3491 {
3492 /*
3493 * Register, register.
3494 */
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_BEGIN(0, 0);
3497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3499 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3500 IEM_GET_MODRM_REG(pVCpu, bRm));
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 else
3505 {
3506 /*
3507 * Memory, register.
3508 */
3509 IEM_MC_BEGIN(0, 2);
3510 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3512
3513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3517
3518 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3519 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3520
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 return VINF_SUCCESS;
3525}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
3536FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3537{
3538 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * Register, register.
3544 */
3545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3546 IEM_MC_BEGIN(0, 0);
3547 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3549 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3550 IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_ADVANCE_RIP();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * Memory, register.
3558 */
3559 IEM_MC_BEGIN(0, 2);
3560 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3562
3563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3565 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3567
3568 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3569 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3570
3571 IEM_MC_ADVANCE_RIP();
3572 IEM_MC_END();
3573 }
3574 return VINF_SUCCESS;
3575}
3576
3577/* Opcode 0xf3 0x0f 0x29 - invalid */
3578/* Opcode 0xf2 0x0f 0x29 - invalid */
3579
3580
3581/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3582FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3583/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3584FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3585
3586
3587/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3588FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3589{
3590 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3591
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3594 {
3595 if (IEM_IS_MODRM_REG_MODE(bRm))
3596 {
3597 /* XMM, greg64 */
3598 IEM_MC_BEGIN(3, 4);
3599 IEM_MC_LOCAL(uint32_t, fMxcsr);
3600 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3601 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3602 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3603 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3604
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3607 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3608
3609            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3610 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3611 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3612 IEM_MC_IF_MXCSR_XCPT_PENDING()
3613 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3614 IEM_MC_ELSE()
3615                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3616 IEM_MC_ENDIF();
3617
3618 IEM_MC_ADVANCE_RIP();
3619 IEM_MC_END();
3620 }
3621 else
3622 {
3623 /* XMM, [mem64] */
3624 IEM_MC_BEGIN(3, 4);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626 IEM_MC_LOCAL(uint32_t, fMxcsr);
3627 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3628 IEM_MC_LOCAL(int64_t, i64Src);
3629 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3630 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3631 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3632
3633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3636 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3637
3638 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3639 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3640 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3641 IEM_MC_IF_MXCSR_XCPT_PENDING()
3642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3643 IEM_MC_ELSE()
3644                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3645 IEM_MC_ENDIF();
3646
3647 IEM_MC_ADVANCE_RIP();
3648 IEM_MC_END();
3649 }
3650 }
3651 else
3652 {
3653 if (IEM_IS_MODRM_REG_MODE(bRm))
3654 {
3655            /* XMM, greg32 */
3656 IEM_MC_BEGIN(3, 4);
3657 IEM_MC_LOCAL(uint32_t, fMxcsr);
3658 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3659 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3660 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3661 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3662
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3666
3667            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3668 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3669 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3670 IEM_MC_IF_MXCSR_XCPT_PENDING()
3671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3672 IEM_MC_ELSE()
3673                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3674 IEM_MC_ENDIF();
3675
3676 IEM_MC_ADVANCE_RIP();
3677 IEM_MC_END();
3678 }
3679 else
3680 {
3681            /* XMM, [mem32] */
3682 IEM_MC_BEGIN(3, 4);
3683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3684 IEM_MC_LOCAL(uint32_t, fMxcsr);
3685 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3686 IEM_MC_LOCAL(int32_t, i32Src);
3687 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3688 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3689 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3690
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3693 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3694 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3695
3696 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3697 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3698 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3699 IEM_MC_IF_MXCSR_XCPT_PENDING()
3700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3701 IEM_MC_ELSE()
3702                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3703 IEM_MC_ENDIF();
3704
3705 IEM_MC_ADVANCE_RIP();
3706 IEM_MC_END();
3707 }
3708 }
3709 return VINF_SUCCESS;
3710}
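
/* The IEM_MC_IF_MXCSR_XCPT_PENDING check used above boils down to testing
   whether the worker raised a status flag whose exception mask bit is clear.
   A hedged sketch:
        uint32_t const fUnmasked = (fMxcsr & X86_MXCSR_XCPT_FLAGS)
                                 & ((~fMxcsr & X86_MXCSR_XCPT_MASK) >> X86_MXCSR_XCPT_MASK_SHIFT);
        // fUnmasked != 0: raise #XM (or #UD when CR4.OSXMMEXCPT is clear)
        // instead of storing the result.
*/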
3711
3712
3713/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3714FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3715{
3716 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3717
3718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3720 {
3721 if (IEM_IS_MODRM_REG_MODE(bRm))
3722 {
3723 /* XMM, greg64 */
3724 IEM_MC_BEGIN(3, 4);
3725 IEM_MC_LOCAL(uint32_t, fMxcsr);
3726 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3727 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3728 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3729 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3730
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3733 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3734
3735 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3736 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3737 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3738 IEM_MC_IF_MXCSR_XCPT_PENDING()
3739 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3740 IEM_MC_ELSE()
3741 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3742 IEM_MC_ENDIF();
3743
3744 IEM_MC_ADVANCE_RIP();
3745 IEM_MC_END();
3746 }
3747 else
3748 {
3749 /* XMM, [mem64] */
3750 IEM_MC_BEGIN(3, 4);
3751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3752 IEM_MC_LOCAL(uint32_t, fMxcsr);
3753 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3754 IEM_MC_LOCAL(int64_t, i64Src);
3755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3756 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3757 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3758
3759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3762 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3763
3764 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3765 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3766 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3767 IEM_MC_IF_MXCSR_XCPT_PENDING()
3768 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3769 IEM_MC_ELSE()
3770 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3771 IEM_MC_ENDIF();
3772
3773 IEM_MC_ADVANCE_RIP();
3774 IEM_MC_END();
3775 }
3776 }
3777 else
3778 {
3779 if (IEM_IS_MODRM_REG_MODE(bRm))
3780 {
3781 /* XMM, greg32 */
3782 IEM_MC_BEGIN(3, 4);
3783 IEM_MC_LOCAL(uint32_t, fMxcsr);
3784 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3785 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3786 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3787 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3788
3789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3790 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3791 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3792
3793 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3794 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3795 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3796 IEM_MC_IF_MXCSR_XCPT_PENDING()
3797 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3798 IEM_MC_ELSE()
3799 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3800 IEM_MC_ENDIF();
3801
3802 IEM_MC_ADVANCE_RIP();
3803 IEM_MC_END();
3804 }
3805 else
3806 {
3807 /* XMM, [mem32] */
3808 IEM_MC_BEGIN(3, 4);
3809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3810 IEM_MC_LOCAL(uint32_t, fMxcsr);
3811 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3812 IEM_MC_LOCAL(int32_t, i32Src);
3813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3814 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3815 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3816
3817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3820 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3821
3822 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3823 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3824 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3825 IEM_MC_IF_MXCSR_XCPT_PENDING()
3826 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3827 IEM_MC_ELSE()
3828 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3829 IEM_MC_ENDIF();
3830
3831 IEM_MC_ADVANCE_RIP();
3832 IEM_MC_END();
3833 }
3834 }
3835 return VINF_SUCCESS;
3836}
3837
3838
3839/**
3840 * @opcode 0x2b
3841 * @opcodesub !11 mr/reg
3842 * @oppfx none
3843 * @opcpuid sse
3844 * @opgroup og_sse1_cachect
3845 * @opxcpttype 1
3846 * @optest op1=1 op2=2 -> op1=2
3847 * @optest op1=0 op2=-42 -> op1=-42
3848 */
3849FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3850{
3851 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3853 if (IEM_IS_MODRM_MEM_MODE(bRm))
3854 {
3855 /*
3856 * memory, register.
3857 */
3858 IEM_MC_BEGIN(0, 2);
3859 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3861
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3866
3867 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
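 /* Note: the 16-byte alignment requirement is still enforced (the ALIGN_SSE
    store faults on a misaligned address); the non-temporal cache hint itself
    is not modelled, so this ends up as an ordinary store. */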
3868 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3869
3870 IEM_MC_ADVANCE_RIP();
3871 IEM_MC_END();
3872 }
3873 /* The register, register encoding is invalid. */
3874 else
3875 return IEMOP_RAISE_INVALID_OPCODE();
3876 return VINF_SUCCESS;
3877}
3878
3879/**
3880 * @opcode 0x2b
3881 * @opcodesub !11 mr/reg
3882 * @oppfx 0x66
3883 * @opcpuid sse2
3884 * @opgroup og_sse2_cachect
3885 * @opxcpttype 1
3886 * @optest op1=1 op2=2 -> op1=2
3887 * @optest op1=0 op2=-42 -> op1=-42
3888 */
3889FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3890{
3891 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3893 if (IEM_IS_MODRM_MEM_MODE(bRm))
3894 {
3895 /*
3896 * memory, register.
3897 */
3898 IEM_MC_BEGIN(0, 2);
3899 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3901
3902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3904 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3905 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3906
3907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
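 /* Same note as movntps above: alignment is enforced, the non-temporal hint
    is not modelled. */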
3908 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3909
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 }
3913 /* The register, register encoding is invalid. */
3914 else
3915 return IEMOP_RAISE_INVALID_OPCODE();
3916 return VINF_SUCCESS;
3917}
3918/* Opcode 0xf3 0x0f 0x2b - invalid */
3919/* Opcode 0xf2 0x0f 0x2b - invalid */
3920
3921
3922/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3923FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3924/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3925FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3926
3927
3928/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3929FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3930{
3931 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3932
3933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
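 /* REX.W selects the 64-bit destination GPR form (cvttss2si r64, xmm/m32);
    the source is always a 32-bit single precision value. */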
3934 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3935 {
3936 if (IEM_IS_MODRM_REG_MODE(bRm))
3937 {
3938 /* greg64, XMM */
3939 IEM_MC_BEGIN(3, 4);
3940 IEM_MC_LOCAL(uint32_t, fMxcsr);
3941 IEM_MC_LOCAL(int64_t, i64Dst);
3942 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3943 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3944 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
3945
3946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3947 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3948 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3949
3950 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3951 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3952 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3953 IEM_MC_IF_MXCSR_XCPT_PENDING()
3954 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3955 IEM_MC_ELSE()
3956 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3957 IEM_MC_ENDIF();
3958
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 else
3963 {
3964 /* greg64, [mem32] */
3965 IEM_MC_BEGIN(3, 4);
3966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3967 IEM_MC_LOCAL(uint32_t, fMxcsr);
3968 IEM_MC_LOCAL(int64_t, i64Dst);
3969 IEM_MC_LOCAL(uint32_t, u32Src);
3970 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3971 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3972 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
3973
3974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3977 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3978
3979 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3980 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3981 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3982 IEM_MC_IF_MXCSR_XCPT_PENDING()
3983 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3984 IEM_MC_ELSE()
3985 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3986 IEM_MC_ENDIF();
3987
3988 IEM_MC_ADVANCE_RIP();
3989 IEM_MC_END();
3990 }
3991 }
3992 else
3993 {
3994 if (IEM_IS_MODRM_REG_MODE(bRm))
3995 {
3996 /* greg, XMM */
3997 IEM_MC_BEGIN(3, 4);
3998 IEM_MC_LOCAL(uint32_t, fMxcsr);
3999 IEM_MC_LOCAL(int32_t, i32Dst);
4000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4001 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4002 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4003
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4006 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4007
4008 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4009 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4010 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4011 IEM_MC_IF_MXCSR_XCPT_PENDING()
4012 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4013 IEM_MC_ELSE()
4014 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4015 IEM_MC_ENDIF();
4016
4017 IEM_MC_ADVANCE_RIP();
4018 IEM_MC_END();
4019 }
4020 else
4021 {
4022 /* greg, [mem] */
4023 IEM_MC_BEGIN(3, 4);
4024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4025 IEM_MC_LOCAL(uint32_t, fMxcsr);
4026 IEM_MC_LOCAL(int32_t, i32Dst);
4027 IEM_MC_LOCAL(uint32_t, u32Src);
4028 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4029 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4030 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4031
4032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4035 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4036
4037 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4038 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4039 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4040 IEM_MC_IF_MXCSR_XCPT_PENDING()
4041 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4042 IEM_MC_ELSE()
4043 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4044 IEM_MC_ENDIF();
4045
4046 IEM_MC_ADVANCE_RIP();
4047 IEM_MC_END();
4048 }
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4055FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4056{
4057 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4058
4059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4060 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4061 {
4062 if (IEM_IS_MODRM_REG_MODE(bRm))
4063 {
4064 /* greg64, XMM */
4065 IEM_MC_BEGIN(3, 4);
4066 IEM_MC_LOCAL(uint32_t, fMxcsr);
4067 IEM_MC_LOCAL(int64_t, i64Dst);
4068 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4069 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4070 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4071
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4074 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4075
4076 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4077 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4078 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4079 IEM_MC_IF_MXCSR_XCPT_PENDING()
4080 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4081 IEM_MC_ELSE()
4082 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4083 IEM_MC_ENDIF();
4084
4085 IEM_MC_ADVANCE_RIP();
4086 IEM_MC_END();
4087 }
4088 else
4089 {
4090 /* greg64, [mem64] */
4091 IEM_MC_BEGIN(3, 4);
4092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4093 IEM_MC_LOCAL(uint32_t, fMxcsr);
4094 IEM_MC_LOCAL(int64_t, i64Dst);
4095 IEM_MC_LOCAL(uint64_t, u64Src);
4096 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4097 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4098 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4099
4100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4102 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4103 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4104
4105 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4106 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4107 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4108 IEM_MC_IF_MXCSR_XCPT_PENDING()
4109 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4110 IEM_MC_ELSE()
4111 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4112 IEM_MC_ENDIF();
4113
4114 IEM_MC_ADVANCE_RIP();
4115 IEM_MC_END();
4116 }
4117 }
4118 else
4119 {
4120 if (IEM_IS_MODRM_REG_MODE(bRm))
4121 {
4122 /* greg, XMM */
4123 IEM_MC_BEGIN(3, 4);
4124 IEM_MC_LOCAL(uint32_t, fMxcsr);
4125 IEM_MC_LOCAL(int32_t, i32Dst);
4126 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4127 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4128 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4129
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4132 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4133
4134 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4135 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4136 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4137 IEM_MC_IF_MXCSR_XCPT_PENDING()
4138 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4139 IEM_MC_ELSE()
4140 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4141 IEM_MC_ENDIF();
4142
4143 IEM_MC_ADVANCE_RIP();
4144 IEM_MC_END();
4145 }
4146 else
4147 {
4148 /* greg, [mem] */
4149 IEM_MC_BEGIN(3, 4);
4150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4151 IEM_MC_LOCAL(uint32_t, fMxcsr);
4152 IEM_MC_LOCAL(int32_t, i32Dst);
4153 IEM_MC_LOCAL(uint64_t, u64Src);
4154 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4155 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4156 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4157
4158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4160 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4161 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4162
4163 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4164 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4165 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4166 IEM_MC_IF_MXCSR_XCPT_PENDING()
4167 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4168 IEM_MC_ELSE()
4169 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4170 IEM_MC_ENDIF();
4171
4172 IEM_MC_ADVANCE_RIP();
4173 IEM_MC_END();
4174 }
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4181FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
4182/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4183FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
4184
4185
4186/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4187FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4188{
4189 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4190
4191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4193 {
4194 if (IEM_IS_MODRM_REG_MODE(bRm))
4195 {
4196 /* greg64, XMM */
4197 IEM_MC_BEGIN(3, 4);
4198 IEM_MC_LOCAL(uint32_t, fMxcsr);
4199 IEM_MC_LOCAL(int64_t, i64Dst);
4200 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4201 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4202 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4203
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4206 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4207
4208 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4209 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4210 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4211 IEM_MC_IF_MXCSR_XCPT_PENDING()
4212 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4213 IEM_MC_ELSE()
4214 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4215 IEM_MC_ENDIF();
4216
4217 IEM_MC_ADVANCE_RIP();
4218 IEM_MC_END();
4219 }
4220 else
4221 {
4222 /* greg64, [mem32] */
4223 IEM_MC_BEGIN(3, 4);
4224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4225 IEM_MC_LOCAL(uint32_t, fMxcsr);
4226 IEM_MC_LOCAL(int64_t, i64Dst);
4227 IEM_MC_LOCAL(uint32_t, u32Src);
4228 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4229 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4230 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4231
4232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4234 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4235 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4236
4237 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4238 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4239 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4240 IEM_MC_IF_MXCSR_XCPT_PENDING()
4241 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4242 IEM_MC_ELSE()
4243 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4244 IEM_MC_ENDIF();
4245
4246 IEM_MC_ADVANCE_RIP();
4247 IEM_MC_END();
4248 }
4249 }
4250 else
4251 {
4252 if (IEM_IS_MODRM_REG_MODE(bRm))
4253 {
4254 /* greg, XMM */
4255 IEM_MC_BEGIN(3, 4);
4256 IEM_MC_LOCAL(uint32_t, fMxcsr);
4257 IEM_MC_LOCAL(int32_t, i32Dst);
4258 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4259 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4260 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4261
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4264 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4265
4266 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4267 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4268 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4269 IEM_MC_IF_MXCSR_XCPT_PENDING()
4270 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4271 IEM_MC_ELSE()
4272 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4273 IEM_MC_ENDIF();
4274
4275 IEM_MC_ADVANCE_RIP();
4276 IEM_MC_END();
4277 }
4278 else
4279 {
4280 /* greg, [mem] */
4281 IEM_MC_BEGIN(3, 4);
4282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4283 IEM_MC_LOCAL(uint32_t, fMxcsr);
4284 IEM_MC_LOCAL(int32_t, i32Dst);
4285 IEM_MC_LOCAL(uint32_t, u32Src);
4286 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4287 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4288 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4289
4290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4293 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4294
4295 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4296 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4297 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4298 IEM_MC_IF_MXCSR_XCPT_PENDING()
4299 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4300 IEM_MC_ELSE()
4301 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4302 IEM_MC_ENDIF();
4303
4304 IEM_MC_ADVANCE_RIP();
4305 IEM_MC_END();
4306 }
4307 }
4308 return VINF_SUCCESS;
4309}
4310
4311
4312/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4313FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4314{
4315 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4316
4317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4318 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4319 {
4320 if (IEM_IS_MODRM_REG_MODE(bRm))
4321 {
4322 /* greg64, XMM */
4323 IEM_MC_BEGIN(3, 4);
4324 IEM_MC_LOCAL(uint32_t, fMxcsr);
4325 IEM_MC_LOCAL(int64_t, i64Dst);
4326 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4327 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4328 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4329
4330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4331 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4332 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4333
4334 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4335 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4336 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4337 IEM_MC_IF_MXCSR_XCPT_PENDING()
4338 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4339 IEM_MC_ELSE()
4340 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4341 IEM_MC_ENDIF();
4342
4343 IEM_MC_ADVANCE_RIP();
4344 IEM_MC_END();
4345 }
4346 else
4347 {
4348 /* greg64, [mem64] */
4349 IEM_MC_BEGIN(3, 4);
4350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4351 IEM_MC_LOCAL(uint32_t, fMxcsr);
4352 IEM_MC_LOCAL(int64_t, i64Dst);
4353 IEM_MC_LOCAL(uint64_t, u64Src);
4354 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4355 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4356 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4357
4358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4361 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4362
4363 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4364 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4365 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4366 IEM_MC_IF_MXCSR_XCPT_PENDING()
4367 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4368 IEM_MC_ELSE()
4369 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4370 IEM_MC_ENDIF();
4371
4372 IEM_MC_ADVANCE_RIP();
4373 IEM_MC_END();
4374 }
4375 }
4376 else
4377 {
4378 if (IEM_IS_MODRM_REG_MODE(bRm))
4379 {
4380 /* greg, XMM */
4381 IEM_MC_BEGIN(3, 4);
4382 IEM_MC_LOCAL(uint32_t, fMxcsr);
4383 IEM_MC_LOCAL(int32_t, i32Dst);
4384 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4385 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4386 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4387
4388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4389 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4390 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4391
4392 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4393 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4394 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4395 IEM_MC_IF_MXCSR_XCPT_PENDING()
4396 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4397 IEM_MC_ELSE()
4398 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4399 IEM_MC_ENDIF();
4400
4401 IEM_MC_ADVANCE_RIP();
4402 IEM_MC_END();
4403 }
4404 else
4405 {
4406 /* greg, [mem] */
4407 IEM_MC_BEGIN(3, 4);
4408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4409 IEM_MC_LOCAL(uint32_t, fMxcsr);
4410 IEM_MC_LOCAL(int32_t, i32Dst);
4411 IEM_MC_LOCAL(uint64_t, u64Src);
4412 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4413 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4414 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4415
4416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4419 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4420
4421 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4422 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4423 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4424 IEM_MC_IF_MXCSR_XCPT_PENDING()
4425 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4426 IEM_MC_ELSE()
4427 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4428 IEM_MC_ENDIF();
4429
4430 IEM_MC_ADVANCE_RIP();
4431 IEM_MC_END();
4432 }
4433 }
4434 return VINF_SUCCESS;
4435}
4436
4437
4438/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4439FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4440{
4441 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4443 if (IEM_IS_MODRM_REG_MODE(bRm))
4444 {
4445 /*
4446 * Register, register.
4447 */
4448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4449 IEM_MC_BEGIN(4, 1);
4450 IEM_MC_LOCAL(uint32_t, fEFlags);
4451 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4452 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4454 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4456 IEM_MC_PREPARE_SSE_USAGE();
4457 IEM_MC_FETCH_EFLAGS(fEFlags);
4458 IEM_MC_REF_MXCSR(pfMxcsr);
4459 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4460 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
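 /* The worker computes the comparison result into fEFlags and its exception
    flags into MXCSR; EFLAGS are only committed when no unmasked SIMD FP
    exception is pending, otherwise #XM (or #UD if CR4.OSXMMEXCPT is clear)
    is raised. */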
4462 IEM_MC_IF_MXCSR_XCPT_PENDING()
4463 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4464 IEM_MC_ELSE()
4465 IEM_MC_COMMIT_EFLAGS(fEFlags);
4466 IEM_MC_ENDIF();
4467
4468 IEM_MC_ADVANCE_RIP();
4469 IEM_MC_END();
4470 }
4471 else
4472 {
4473 /*
4474 * Register, memory.
4475 */
4476 IEM_MC_BEGIN(4, 3);
4477 IEM_MC_LOCAL(uint32_t, fEFlags);
4478 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4479 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4480 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4481 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4482 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4484
4485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4488 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4489
4490 IEM_MC_PREPARE_SSE_USAGE();
4491 IEM_MC_REF_MXCSR(pfMxcsr);
4492 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4493 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4494 IEM_MC_IF_MXCSR_XCPT_PENDING()
4495 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4496 IEM_MC_ELSE()
4497 IEM_MC_COMMIT_EFLAGS(fEFlags);
4498 IEM_MC_ENDIF();
4499
4500 IEM_MC_ADVANCE_RIP();
4501 IEM_MC_END();
4502 }
4503 return VINF_SUCCESS;
4504}
4505
4506
4507/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4508FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4509{
4510 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4512 if (IEM_IS_MODRM_REG_MODE(bRm))
4513 {
4514 /*
4515 * Register, register.
4516 */
4517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4518 IEM_MC_BEGIN(4, 1);
4519 IEM_MC_LOCAL(uint32_t, fEFlags);
4520 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4521 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4522 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4523 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4525 IEM_MC_PREPARE_SSE_USAGE();
4526 IEM_MC_FETCH_EFLAGS(fEFlags);
4527 IEM_MC_REF_MXCSR(pfMxcsr);
4528 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4529 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4530 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4531 IEM_MC_IF_MXCSR_XCPT_PENDING()
4532 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4533 IEM_MC_ELSE()
4534 IEM_MC_COMMIT_EFLAGS(fEFlags);
4535 IEM_MC_ENDIF();
4536
4537 IEM_MC_ADVANCE_RIP();
4538 IEM_MC_END();
4539 }
4540 else
4541 {
4542 /*
4543 * Register, memory.
4544 */
4545 IEM_MC_BEGIN(4, 3);
4546 IEM_MC_LOCAL(uint32_t, fEFlags);
4547 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4548 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4549 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4550 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4551 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4553
4554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4556 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4557 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4558
4559 IEM_MC_PREPARE_SSE_USAGE();
4560 IEM_MC_REF_MXCSR(pfMxcsr);
4561 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4562 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4563 IEM_MC_IF_MXCSR_XCPT_PENDING()
4564 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4565 IEM_MC_ELSE()
4566 IEM_MC_COMMIT_EFLAGS(fEFlags);
4567 IEM_MC_ENDIF();
4568
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575
4576/* Opcode 0xf3 0x0f 0x2e - invalid */
4577/* Opcode 0xf2 0x0f 0x2e - invalid */
4578
4579
4580/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4581FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4582{
4583 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4585 if (IEM_IS_MODRM_REG_MODE(bRm))
4586 {
4587 /*
4588 * Register, register.
4589 */
4590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4591 IEM_MC_BEGIN(4, 1);
4592 IEM_MC_LOCAL(uint32_t, fEFlags);
4593 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4594 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4595 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4596 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4597 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4598 IEM_MC_PREPARE_SSE_USAGE();
4599 IEM_MC_FETCH_EFLAGS(fEFlags);
4600 IEM_MC_REF_MXCSR(pfMxcsr);
4601 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4602 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4603 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4604 IEM_MC_IF_MXCSR_XCPT_PENDING()
4605 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4606 IEM_MC_ELSE()
4607 IEM_MC_COMMIT_EFLAGS(fEFlags);
4608 IEM_MC_ENDIF();
4609
4610 IEM_MC_ADVANCE_RIP();
4611 IEM_MC_END();
4612 }
4613 else
4614 {
4615 /*
4616 * Register, memory.
4617 */
4618 IEM_MC_BEGIN(4, 3);
4619 IEM_MC_LOCAL(uint32_t, fEFlags);
4620 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4621 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4622 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4623 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4624 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4626
4627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4630 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4631
4632 IEM_MC_PREPARE_SSE_USAGE();
4633 IEM_MC_REF_MXCSR(pfMxcsr);
4634 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4635 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4636 IEM_MC_IF_MXCSR_XCPT_PENDING()
4637 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4638 IEM_MC_ELSE()
4639 IEM_MC_COMMIT_EFLAGS(fEFlags);
4640 IEM_MC_ENDIF();
4641
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 }
4645 return VINF_SUCCESS;
4646}
4647
4648
4649/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
4650FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4651{
4652 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654 if (IEM_IS_MODRM_REG_MODE(bRm))
4655 {
4656 /*
4657 * Register, register.
4658 */
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660 IEM_MC_BEGIN(4, 1);
4661 IEM_MC_LOCAL(uint32_t, fEFlags);
4662 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4663 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4664 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4665 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4666 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4667 IEM_MC_PREPARE_SSE_USAGE();
4668 IEM_MC_FETCH_EFLAGS(fEFlags);
4669 IEM_MC_REF_MXCSR(pfMxcsr);
4670 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4671 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4672 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4673 IEM_MC_IF_MXCSR_XCPT_PENDING()
4674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4675 IEM_MC_ELSE()
4676 IEM_MC_COMMIT_EFLAGS(fEFlags);
4677 IEM_MC_ENDIF();
4678
4679 IEM_MC_ADVANCE_RIP();
4680 IEM_MC_END();
4681 }
4682 else
4683 {
4684 /*
4685 * Register, memory.
4686 */
4687 IEM_MC_BEGIN(4, 3);
4688 IEM_MC_LOCAL(uint32_t, fEFlags);
4689 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4690 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4691 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4692 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4693 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4699 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4700
4701 IEM_MC_PREPARE_SSE_USAGE();
4702 IEM_MC_REF_MXCSR(pfMxcsr);
4703 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4705 IEM_MC_IF_MXCSR_XCPT_PENDING()
4706 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4707 IEM_MC_ELSE()
4708 IEM_MC_COMMIT_EFLAGS(fEFlags);
4709 IEM_MC_ENDIF();
4710
4711 IEM_MC_ADVANCE_RIP();
4712 IEM_MC_END();
4713 }
4714 return VINF_SUCCESS;
4715}
4716
4717
4718/* Opcode 0xf3 0x0f 0x2f - invalid */
4719/* Opcode 0xf2 0x0f 0x2f - invalid */
4720
4721/** Opcode 0x0f 0x30. */
4722FNIEMOP_DEF(iemOp_wrmsr)
4723{
4724 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4726 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
4727}
4728
4729
4730/** Opcode 0x0f 0x31. */
4731FNIEMOP_DEF(iemOp_rdtsc)
4732{
4733 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4735 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
4736}
4737
4738
4739/** Opcode 0x0f 0x32. */
4740FNIEMOP_DEF(iemOp_rdmsr)
4741{
4742 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
4745}
4746
4747
4748/** Opcode 0x0f 0x33. */
4749FNIEMOP_DEF(iemOp_rdpmc)
4750{
4751 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
4754}
4755
4756
4757/** Opcode 0x0f 0x34. */
4758FNIEMOP_DEF(iemOp_sysenter)
4759{
4760 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
4763}
4764
4765/** Opcode 0x0f 0x35. */
4766FNIEMOP_DEF(iemOp_sysexit)
4767{
4768 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4770 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4771}
4772
4773/** Opcode 0x0f 0x37. */
4774FNIEMOP_STUB(iemOp_getsec);
4775
4776
4777/** Opcode 0x0f 0x38. */
4778FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4779{
4780#ifdef IEM_WITH_THREE_0F_38
4781 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
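 /* The table holds four entries per opcode byte, one per mandatory-prefix
    variant (none, 0x66, 0xf3, 0xf2), selected via the decoded prefix index. */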
4782 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4783#else
4784 IEMOP_BITCH_ABOUT_STUB();
4785 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4786#endif
4787}
4788
4789
4790/** Opcode 0x0f 0x3a. */
4791FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4792{
4793#ifdef IEM_WITH_THREE_0F_3A
4794 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
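 /* Same layout as the 0x0f 0x38 table above: four entries per opcode byte,
    indexed by the mandatory-prefix index. */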
4795 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4796#else
4797 IEMOP_BITCH_ABOUT_STUB();
4798 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4799#endif
4800}
4801
4802
4803/**
4804 * Implements a conditional move.
4805 *
4806 * Wish there was an obvious way to do this where we could share and reduce
4807 * code bloat.
4808 *
4809 * @param a_Cnd The conditional "microcode" operation.
4810 */
4811#define CMOV_X(a_Cnd) \
4812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4813 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4814 { \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(0, 1); \
4819 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4820 a_Cnd { \
4821 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4822 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4823 } IEM_MC_ENDIF(); \
4824 IEM_MC_ADVANCE_RIP(); \
4825 IEM_MC_END(); \
4826 return VINF_SUCCESS; \
4827 \
4828 case IEMMODE_32BIT: \
4829 IEM_MC_BEGIN(0, 1); \
4830 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4831 a_Cnd { \
4832 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4833 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4834 } IEM_MC_ELSE() { \
4835 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4836 } IEM_MC_ENDIF(); \
4837 IEM_MC_ADVANCE_RIP(); \
4838 IEM_MC_END(); \
4839 return VINF_SUCCESS; \
4840 \
4841 case IEMMODE_64BIT: \
4842 IEM_MC_BEGIN(0, 1); \
4843 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4844 a_Cnd { \
4845 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4846 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4847 } IEM_MC_ENDIF(); \
4848 IEM_MC_ADVANCE_RIP(); \
4849 IEM_MC_END(); \
4850 return VINF_SUCCESS; \
4851 \
4852 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4853 } \
4854 } \
4855 else \
4856 { \
4857 switch (pVCpu->iem.s.enmEffOpSize) \
4858 { \
4859 case IEMMODE_16BIT: \
4860 IEM_MC_BEGIN(0, 2); \
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4862 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4864 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4865 a_Cnd { \
4866 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4867 } IEM_MC_ENDIF(); \
4868 IEM_MC_ADVANCE_RIP(); \
4869 IEM_MC_END(); \
4870 return VINF_SUCCESS; \
4871 \
4872 case IEMMODE_32BIT: \
4873 IEM_MC_BEGIN(0, 2); \
4874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4875 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4877 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4878 a_Cnd { \
4879 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4880 } IEM_MC_ELSE() { \
4881 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4882 } IEM_MC_ENDIF(); \
4883 IEM_MC_ADVANCE_RIP(); \
4884 IEM_MC_END(); \
4885 return VINF_SUCCESS; \
4886 \
4887 case IEMMODE_64BIT: \
4888 IEM_MC_BEGIN(0, 2); \
4889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4890 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4892 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4893 a_Cnd { \
4894 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4895 } IEM_MC_ENDIF(); \
4896 IEM_MC_ADVANCE_RIP(); \
4897 IEM_MC_END(); \
4898 return VINF_SUCCESS; \
4899 \
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4901 } \
4902 } do {} while (0)
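/*
 * Illustration (comment only, not compiled): CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF))
 * as used by iemOp_cmove_Gv_Ev expands the 16-bit register case to:
 *
 *      IEM_MC_BEGIN(0, 1);
 *      IEM_MC_LOCAL(uint16_t, u16Tmp);
 *      IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
 *          IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 *          IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
 *      } IEM_MC_ENDIF();
 *      IEM_MC_ADVANCE_RIP();
 *      IEM_MC_END();
 *      return VINF_SUCCESS;
 *
 * The 32-bit cases additionally clear the high half of the destination in an
 * IEM_MC_ELSE() branch, since a 32-bit CMOV zero-extends the destination in
 * 64-bit mode even when the condition is false.
 */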
4903
4904
4905
4906/** Opcode 0x0f 0x40. */
4907FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4908{
4909 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4910 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4911}
4912
4913
4914/** Opcode 0x0f 0x41. */
4915FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4916{
4917 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4918 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4919}
4920
4921
4922/** Opcode 0x0f 0x42. */
4923FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4924{
4925 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4926 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4927}
4928
4929
4930/** Opcode 0x0f 0x43. */
4931FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
4932{
4933 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
4934 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
4935}
4936
4937
4938/** Opcode 0x0f 0x44. */
4939FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
4940{
4941 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
4942 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
4943}
4944
4945
4946/** Opcode 0x0f 0x45. */
4947FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
4948{
4949 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
4950 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
4951}
4952
4953
4954/** Opcode 0x0f 0x46. */
4955FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
4956{
4957 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
4958 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4959}
4960
4961
4962/** Opcode 0x0f 0x47. */
4963FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
4964{
4965 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
4966 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4967}
4968
4969
4970/** Opcode 0x0f 0x48. */
4971FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
4972{
4973 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
4974 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
4975}
4976
4977
4978/** Opcode 0x0f 0x49. */
4979FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
4980{
4981 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
4982 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
4983}
4984
4985
4986/** Opcode 0x0f 0x4a. */
4987FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
4988{
4989 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
4990 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
4991}
4992
4993
4994/** Opcode 0x0f 0x4b. */
4995FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
4996{
4997 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
4998 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
4999}
5000
5001
5002/** Opcode 0x0f 0x4c. */
5003FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5004{
5005 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5006 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5007}
5008
5009
5010/** Opcode 0x0f 0x4d. */
5011FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5012{
5013 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5014 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5015}
5016
5017
5018/** Opcode 0x0f 0x4e. */
5019FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5020{
5021 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5022 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5023}
5024
5025
5026/** Opcode 0x0f 0x4f. */
5027FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5028{
5029 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5030 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5031}
5032
5033#undef CMOV_X
5034
5035/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5036FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5037{
5038 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5040 if (IEM_IS_MODRM_REG_MODE(bRm))
5041 {
5042 /*
5043 * Register, register.
5044 */
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5046 IEM_MC_BEGIN(2, 1);
5047 IEM_MC_LOCAL(uint8_t, u8Dst);
5048 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5049 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5050 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5051 IEM_MC_PREPARE_SSE_USAGE();
5052 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5053 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
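 /* u8Dst holds the four packed sign bits; storing it as a 32-bit value
    zero-extends it into the destination general purpose register. */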
5054 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 return VINF_SUCCESS;
5058 }
5059
5060 /* No memory operand. */
5061 return IEMOP_RAISE_INVALID_OPCODE();
5062}
5063
5064
5065/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5066FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5067{
5068 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5070 if (IEM_IS_MODRM_REG_MODE(bRm))
5071 {
5072 /*
5073 * Register, register.
5074 */
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076 IEM_MC_BEGIN(2, 1);
5077 IEM_MC_LOCAL(uint8_t, u8Dst);
5078 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5079 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5080 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5081 IEM_MC_PREPARE_SSE_USAGE();
5082 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5083 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5084 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5085 IEM_MC_ADVANCE_RIP();
5086 IEM_MC_END();
5087 return VINF_SUCCESS;
5088 }
5089
5090 /* No memory operand. */
5091 return IEMOP_RAISE_INVALID_OPCODE();
5092
5093}
5094
5095
5096/* Opcode 0xf3 0x0f 0x50 - invalid */
5097/* Opcode 0xf2 0x0f 0x50 - invalid */
5098
5099
5100/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5101FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5102{
5103 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5104 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5105}
5106
5107
5108/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5109FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5110{
5111 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5112 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5113}
5114
5115
5116/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5117FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5118{
5119 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5120 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5121}
5122
5123
5124/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5125FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5126{
5127 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5128 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5129}
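/* The sqrtps..maxsd wrappers below all defer to common SSE/SSE2 FP workers:
   the FullFull_To_Full variants operate on all 128 bits, while the
   FullR32/FullR64 variants only replace the low scalar element and leave the
   upper lanes of the destination untouched. */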
5130
5131
5132/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5133FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5134/* Opcode 0x66 0x0f 0x52 - invalid */
5135/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5136FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5137/* Opcode 0xf2 0x0f 0x52 - invalid */
5138
5139/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5140FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5141/* Opcode 0x66 0x0f 0x53 - invalid */
5142/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5143FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5144/* Opcode 0xf2 0x0f 0x53 - invalid */
5145
5146
5147/** Opcode 0x0f 0x54 - andps Vps, Wps */
5148FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5149{
5150 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5151 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5152}
5153
5154
5155/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5156FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5157{
5158 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5159 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5160}
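/* Note: andps/andpd (and the andn/or/xor variants below) reuse the integer
   pand/pandn/por/pxor workers, the bitwise operation being identical however
   the 128 bits are interpreted. */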
5161
5162
5163/* Opcode 0xf3 0x0f 0x54 - invalid */
5164/* Opcode 0xf2 0x0f 0x54 - invalid */
5165
5166
5167/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5168FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5169{
5170 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5171 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5172}
5173
5174
5175/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5176FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5177{
5178 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5179 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5180}
5181
5182
5183/* Opcode 0xf3 0x0f 0x55 - invalid */
5184/* Opcode 0xf2 0x0f 0x55 - invalid */
5185
5186
5187/** Opcode 0x0f 0x56 - orps Vps, Wps */
5188FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5189{
5190 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5191 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5192}
5193
5194
5195/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5196FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5197{
5198 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5199 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5200}
5201
5202
5203/* Opcode 0xf3 0x0f 0x56 - invalid */
5204/* Opcode 0xf2 0x0f 0x56 - invalid */
5205
5206
5207/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5208FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5209{
5210 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5211 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5212}
5213
5214
5215/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5216FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5217{
5218 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5219 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5220}
5221
5222
5223/* Opcode 0xf3 0x0f 0x57 - invalid */
5224/* Opcode 0xf2 0x0f 0x57 - invalid */
5225
5226/** Opcode 0x0f 0x58 - addps Vps, Wps */
5227FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5228{
5229 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5230 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5231}
5232
5233
5234/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5235FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5236{
5237 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5238 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5239}
5240
5241
5242/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5243FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5244{
5245 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5246 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5247}
5248
5249
5250/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5251FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5252{
5253 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5254 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5255}
5256
5257
5258/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5259FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5260{
5261 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5262 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5263}
5264
5265
5266/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5267FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5268{
5269 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5270 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5271}
5272
5273
5274/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5275FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5276{
5277 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5278 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5279}
5280
5281
5282/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5283FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5284{
5285 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5286 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5287}
5288
5289
5290/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5291FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5292{
5293 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5294 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5295}
5296
5297
5298/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5299FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5300{
5301 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5302 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5303}
5304
5305
5306/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5307FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5308{
5309 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5310 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5311}
5312
5313
5314/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5315FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5316{
5317 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5318 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5319}
5320
5321
5322/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5323FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5324{
5325 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5326 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5327}
5328
5329
5330/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5331FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5332{
5333 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5334 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5335}
5336
5337
5338/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5339FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5340{
5341 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5342 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5343}
5344
5345
5346/* Opcode 0xf2 0x0f 0x5b - invalid */
5347
5348
5349/** Opcode 0x0f 0x5c - subps Vps, Wps */
5350FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5351{
5352 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5353 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5354}
5355
5356
5357/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5358FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5359{
5360 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5361 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5362}
5363
5364
5365/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5366FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5367{
5368 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5369 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5370}
5371
5372
5373/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5374FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5375{
5376 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5377 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5378}
5379
5380
5381/** Opcode 0x0f 0x5d - minps Vps, Wps */
5382FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5383{
5384 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5385 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5386}
5387
5388
5389/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5390FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5391{
5392 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5393 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5394}
5395
5396
5397/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5398FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5399{
5400 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5401 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5402}
5403
5404
5405/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5406FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5407{
5408 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5409 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5410}
5411
5412
5413/** Opcode 0x0f 0x5e - divps Vps, Wps */
5414FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5415{
5416 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5417 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5418}
5419
5420
5421/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5422FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5423{
5424 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5425 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5426}
5427
5428
5429/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5430FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5431{
5432 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5433 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5434}
5435
5436
5437/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5438FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5439{
5440 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5441 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5442}
5443
5444
5445/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5446FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5447{
5448 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5449 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5450}
5451
5452
5453/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5454FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5455{
5456 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5457 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5458}
5459
5460
5461/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5462FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5463{
5464 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5465 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5466}
5467
5468
5469/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5470FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5471{
5472 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5473 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5474}
5475
5476
5477/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5478FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5479{
5480 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5481 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5482}
5483
5484
5485/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5486FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5487{
5488 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5489 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5490}
5491
5492
5493/* Opcode 0xf3 0x0f 0x60 - invalid */
5494
5495
5496/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5497FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5498{
5499 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5500 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5501 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5502}
5503
5504
5505/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5506FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5507{
5508 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5509 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5510}
5511
5512
5513/* Opcode 0xf3 0x0f 0x61 - invalid */
5514
5515
5516/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5517FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5518{
5519 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5520 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5521}
5522
5523
5524/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5525FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5526{
5527 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5528 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5529}
5530
5531
5532/* Opcode 0xf3 0x0f 0x62 - invalid */
5533
5534
5535
5536/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5537FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5538{
5539 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5540 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5541}
5542
5543
5544/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5545FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5546{
5547 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5548 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5549}
5550
5551
5552/* Opcode 0xf3 0x0f 0x63 - invalid */
5553
5554
5555/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5556FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5557{
5558 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5559 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5560}
5561
5562
5563/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5564FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5565{
5566 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5567 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5568}
5569
5570
5571/* Opcode 0xf3 0x0f 0x64 - invalid */
5572
5573
5574/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5575FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5576{
5577 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5578 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5579}
5580
5581
5582/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5583FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5584{
5585 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5586 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5587}
5588
5589
5590/* Opcode 0xf3 0x0f 0x65 - invalid */
5591
5592
5593/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5594FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5595{
5596 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5597 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5598}
5599
5600
5601/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5602FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5603{
5604 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5605 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5606}
5607
5608
5609/* Opcode 0xf3 0x0f 0x66 - invalid */
5610
5611
5612/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5613FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5614{
5615 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5616 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5617}
5618
5619
5620/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5621FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5622{
5623 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5624 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5625}
5626
5627
5628/* Opcode 0xf3 0x0f 0x67 - invalid */
5629
5630
5631/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5632 * @note Intel and AMD both use Qd for the second parameter, however they
5633 * both list it as an mmX/mem64 operand and Intel describes it as being
5634 * loaded as a qword, so it should be Qq, shouldn't it? */
5635FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5636{
5637 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5638 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5639}
5640
5641
5642/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5643FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5644{
5645 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5646 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5647}
5648
5649
5650/* Opcode 0xf3 0x0f 0x68 - invalid */
5651
5652
5653/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5654 * @note Intel and AMD both use Qd for the second parameter, however they
5655 * both list it as an mmX/mem64 operand and Intel describes it as being
5656 * loaded as a qword, so it should be Qq, shouldn't it? */
5657FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5658{
5659 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5660 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5661}
5662
5663
5664/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5665FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5666{
5667 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5668 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5669
5670}
5671
5672
5673/* Opcode 0xf3 0x0f 0x69 - invalid */
5674
5675
5676/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5677 * @note Intel and AMD both use Qd for the second parameter, however they
5678 * both list it as an mmX/mem64 operand and Intel describes it as being
5679 * loaded as a qword, so it should be Qq, shouldn't it? */
5680FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5681{
5682 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5683 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5684}
5685
5686
5687/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5688FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5689{
5690 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5691 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5692}
5693
5694
5695/* Opcode 0xf3 0x0f 0x6a - invalid */
5696
5697
5698/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5699FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5700{
5701 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5702 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5703}
5704
5705
5706/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5707FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5708{
5709 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5710 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5711}
5712
5713
5714/* Opcode 0xf3 0x0f 0x6b - invalid */
5715
5716
5717/* Opcode 0x0f 0x6c - invalid */
5718
5719
5720/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5721FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5722{
5723 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5724 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5725}
5726
5727
5728/* Opcode 0xf3 0x0f 0x6c - invalid */
5729/* Opcode 0xf2 0x0f 0x6c - invalid */
5730
5731
5732/* Opcode 0x0f 0x6d - invalid */
5733
5734
5735/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5736FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5737{
5738 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5739 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5740}
5741
5742
5743/* Opcode 0xf3 0x0f 0x6d - invalid */
5744
5745
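/** Opcode 0x0f 0x6e - movd_q Pd, Ey */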
5746FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5747{
5748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5749 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5750 {
5751 /**
5752 * @opcode 0x6e
5753 * @opcodesub rex.w=1
5754 * @oppfx none
5755 * @opcpuid mmx
5756 * @opgroup og_mmx_datamove
5757 * @opxcpttype 5
5758 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5759 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5760 */
5761 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5762 if (IEM_IS_MODRM_REG_MODE(bRm))
5763 {
5764 /* MMX, greg64 */
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 IEM_MC_BEGIN(0, 1);
5767 IEM_MC_LOCAL(uint64_t, u64Tmp);
5768
5769 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5770 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5771
5772 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5773 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5774 IEM_MC_FPU_TO_MMX_MODE();
5775
5776 IEM_MC_ADVANCE_RIP();
5777 IEM_MC_END();
5778 }
5779 else
5780 {
5781 /* MMX, [mem64] */
5782 IEM_MC_BEGIN(0, 2);
5783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5784 IEM_MC_LOCAL(uint64_t, u64Tmp);
5785
5786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5788 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5789 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5790
5791 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5792 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5793 IEM_MC_FPU_TO_MMX_MODE();
5794
5795 IEM_MC_ADVANCE_RIP();
5796 IEM_MC_END();
5797 }
5798 }
5799 else
5800 {
5801 /**
5802 * @opdone
5803 * @opcode 0x6e
5804 * @opcodesub rex.w=0
5805 * @oppfx none
5806 * @opcpuid mmx
5807 * @opgroup og_mmx_datamove
5808 * @opxcpttype 5
5809 * @opfunction iemOp_movd_q_Pd_Ey
5810 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5811 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5812 */
5813 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5814 if (IEM_IS_MODRM_REG_MODE(bRm))
5815 {
5816 /* MMX, greg32 */
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_BEGIN(0, 1);
5819 IEM_MC_LOCAL(uint64_t, u64Tmp);
5820
5821 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5822 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5823
5824 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5825 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5826 IEM_MC_FPU_TO_MMX_MODE();
5827
5828 IEM_MC_ADVANCE_RIP();
5829 IEM_MC_END();
5830 }
5831 else
5832 {
5833 /* MMX, [mem32] */
5834 IEM_MC_BEGIN(0, 2);
5835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5836 IEM_MC_LOCAL(uint32_t, u32Tmp);
5837
5838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5840 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5841 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5842
5843 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5844 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5845 IEM_MC_FPU_TO_MMX_MODE();
5846
5847 IEM_MC_ADVANCE_RIP();
5848 IEM_MC_END();
5849 }
5850 }
5851 return VINF_SUCCESS;
5852}
5853
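/** Opcode 0x66 0x0f 0x6e - movd_q Vy, Ey */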
5854FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
5855{
5856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5857 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5858 {
5859 /**
5860 * @opcode 0x6e
5861 * @opcodesub rex.w=1
5862 * @oppfx 0x66
5863 * @opcpuid sse2
5864 * @opgroup og_sse2_simdint_datamove
5865 * @opxcpttype 5
5866 * @optest 64-bit / op1=1 op2=2 -> op1=2
5867 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5868 */
5869 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5870 if (IEM_IS_MODRM_REG_MODE(bRm))
5871 {
5872 /* XMM, greg64 */
5873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5874 IEM_MC_BEGIN(0, 1);
5875 IEM_MC_LOCAL(uint64_t, u64Tmp);
5876
5877 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5878 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5879
5880 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5881 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5882
5883 IEM_MC_ADVANCE_RIP();
5884 IEM_MC_END();
5885 }
5886 else
5887 {
5888 /* XMM, [mem64] */
5889 IEM_MC_BEGIN(0, 2);
5890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5891 IEM_MC_LOCAL(uint64_t, u64Tmp);
5892
5893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5895 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5896 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5897
5898 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5899 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5900
5901 IEM_MC_ADVANCE_RIP();
5902 IEM_MC_END();
5903 }
5904 }
5905 else
5906 {
5907 /**
5908 * @opdone
5909 * @opcode 0x6e
5910 * @opcodesub rex.w=0
5911 * @oppfx 0x66
5912 * @opcpuid sse2
5913 * @opgroup og_sse2_simdint_datamove
5914 * @opxcpttype 5
5915 * @opfunction iemOp_movd_q_Vy_Ey
5916 * @optest op1=1 op2=2 -> op1=2
5917 * @optest op1=0 op2=-42 -> op1=-42
5918 */
5919 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5920 if (IEM_IS_MODRM_REG_MODE(bRm))
5921 {
5922 /* XMM, greg32 */
5923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5924 IEM_MC_BEGIN(0, 1);
5925 IEM_MC_LOCAL(uint32_t, u32Tmp);
5926
5927 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5928 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5929
5930 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5931 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5932
5933 IEM_MC_ADVANCE_RIP();
5934 IEM_MC_END();
5935 }
5936 else
5937 {
5938 /* XMM, [mem32] */
5939 IEM_MC_BEGIN(0, 2);
5940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5941 IEM_MC_LOCAL(uint32_t, u32Tmp);
5942
5943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5947
5948 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5949 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5950
5951 IEM_MC_ADVANCE_RIP();
5952 IEM_MC_END();
5953 }
5954 }
5955 return VINF_SUCCESS;
5956}
5957
5958/* Opcode 0xf3 0x0f 0x6e - invalid */
5959
5960
5961/**
5962 * @opcode 0x6f
5963 * @oppfx none
5964 * @opcpuid mmx
5965 * @opgroup og_mmx_datamove
5966 * @opxcpttype 5
5967 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5968 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5969 */
5970FNIEMOP_DEF(iemOp_movq_Pq_Qq)
5971{
5972 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
5973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5974 if (IEM_IS_MODRM_REG_MODE(bRm))
5975 {
5976 /*
5977 * Register, register.
5978 */
5979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5980 IEM_MC_BEGIN(0, 1);
5981 IEM_MC_LOCAL(uint64_t, u64Tmp);
5982
5983 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5984 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5985
5986 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
5987 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5988 IEM_MC_FPU_TO_MMX_MODE();
5989
5990 IEM_MC_ADVANCE_RIP();
5991 IEM_MC_END();
5992 }
5993 else
5994 {
5995 /*
5996 * Register, memory.
5997 */
5998 IEM_MC_BEGIN(0, 2);
5999 IEM_MC_LOCAL(uint64_t, u64Tmp);
6000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6001
6002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6004 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6005 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6006
6007 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6008 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6009 IEM_MC_FPU_TO_MMX_MODE();
6010
6011 IEM_MC_ADVANCE_RIP();
6012 IEM_MC_END();
6013 }
6014 return VINF_SUCCESS;
6015}
6016
6017/**
6018 * @opcode 0x6f
6019 * @oppfx 0x66
6020 * @opcpuid sse2
6021 * @opgroup og_sse2_simdint_datamove
6022 * @opxcpttype 1
6023 * @optest op1=1 op2=2 -> op1=2
6024 * @optest op1=0 op2=-42 -> op1=-42
6025 */
6026FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6027{
6028 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6030 if (IEM_IS_MODRM_REG_MODE(bRm))
6031 {
6032 /*
6033 * Register, register.
6034 */
6035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6036 IEM_MC_BEGIN(0, 0);
6037
6038 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6039 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6040
6041 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6042 IEM_GET_MODRM_RM(pVCpu, bRm));
6043 IEM_MC_ADVANCE_RIP();
6044 IEM_MC_END();
6045 }
6046 else
6047 {
6048 /*
6049 * Register, memory.
6050 */
6051 IEM_MC_BEGIN(0, 2);
6052 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6054
6055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6057 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6058 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6059
6060 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6061 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6062
6063 IEM_MC_ADVANCE_RIP();
6064 IEM_MC_END();
6065 }
6066 return VINF_SUCCESS;
6067}
6068
6069/**
6070 * @opcode 0x6f
6071 * @oppfx 0xf3
6072 * @opcpuid sse2
6073 * @opgroup og_sse2_simdint_datamove
6074 * @opxcpttype 4UA
6075 * @optest op1=1 op2=2 -> op1=2
6076 * @optest op1=0 op2=-42 -> op1=-42
6077 */
6078FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6079{
6080 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6082 if (IEM_IS_MODRM_REG_MODE(bRm))
6083 {
6084 /*
6085 * Register, register.
6086 */
6087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6088 IEM_MC_BEGIN(0, 0);
6089 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6091 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6092 IEM_GET_MODRM_RM(pVCpu, bRm));
6093 IEM_MC_ADVANCE_RIP();
6094 IEM_MC_END();
6095 }
6096 else
6097 {
6098 /*
6099 * Register, memory.
6100 */
6101 IEM_MC_BEGIN(0, 2);
6102 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6104
6105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6109 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6110 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6111
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 }
6115 return VINF_SUCCESS;
6116}
6117
6118
6119/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6120FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6121{
6122 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6124 if (IEM_IS_MODRM_REG_MODE(bRm))
6125 {
6126 /*
6127 * Register, register.
6128 */
6129 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6131
6132 IEM_MC_BEGIN(3, 0);
6133 IEM_MC_ARG(uint64_t *, pDst, 0);
6134 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6135 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6137 IEM_MC_PREPARE_FPU_USAGE();
6138 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6139 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6140 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6141 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6142 IEM_MC_FPU_TO_MMX_MODE();
6143 IEM_MC_ADVANCE_RIP();
6144 IEM_MC_END();
6145 }
6146 else
6147 {
6148 /*
6149 * Register, memory.
6150 */
6151 IEM_MC_BEGIN(3, 2);
6152 IEM_MC_ARG(uint64_t *, pDst, 0);
6153 IEM_MC_LOCAL(uint64_t, uSrc);
6154 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6156
6157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
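 /* Note: the imm8 trails the ModR/M displacement bytes, so it can only be
    fetched once the effective address has been decoded. */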
6158 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6159 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6161 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6162
6163 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6164 IEM_MC_PREPARE_FPU_USAGE();
6165 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6167 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6168 IEM_MC_FPU_TO_MMX_MODE();
6169
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 }
6173 return VINF_SUCCESS;
6174}
6175
6176
6177/**
6178 * Common worker for SSE2 instructions on the forms:
6179 * pshufd xmm1, xmm2/mem128, imm8
6180 * pshufhw xmm1, xmm2/mem128, imm8
6181 * pshuflw xmm1, xmm2/mem128, imm8
6182 *
6183 * Proper alignment of the 128-bit operand is enforced.
6184 * Exceptions type 4. SSE2 cpuid checks.
6185 */
6186FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6187{
6188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6189 if (IEM_IS_MODRM_REG_MODE(bRm))
6190 {
6191 /*
6192 * Register, register.
6193 */
6194 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6196
6197 IEM_MC_BEGIN(3, 0);
6198 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6199 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6200 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6201 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6202 IEM_MC_PREPARE_SSE_USAGE();
6203 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6204 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6205 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6206 IEM_MC_ADVANCE_RIP();
6207 IEM_MC_END();
6208 }
6209 else
6210 {
6211 /*
6212 * Register, memory.
6213 */
6214 IEM_MC_BEGIN(3, 2);
6215 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6216 IEM_MC_LOCAL(RTUINT128U, uSrc);
6217 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6219
6220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6221 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6222 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6225
6226 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6227 IEM_MC_PREPARE_SSE_USAGE();
6228 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6229 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6230
6231 IEM_MC_ADVANCE_RIP();
6232 IEM_MC_END();
6233 }
6234 return VINF_SUCCESS;
6235}
6236
6237
6238/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6239FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6240{
6241 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6242 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6243}
6244
6245
6246/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6247FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6248{
6249 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6250 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6251}
6252
6253
6254/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6255FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6256{
6257 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6258 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6259}
6260
6261
6262/**
6263 * Common worker for MMX instructions of the form:
6264 * psrlw mm, imm8
6265 * psraw mm, imm8
6266 * psllw mm, imm8
6267 * psrld mm, imm8
6268 * psrad mm, imm8
6269 * pslld mm, imm8
6270 * psrlq mm, imm8
6271 * psllq mm, imm8
6272 *
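 * The immediate is fetched here since it follows the ModR/M byte, and only
 * the register form can reach this worker: the Group 12/13/14 tables send
 * the memory form to iemOp_InvalidWithRMNeedImm8 instead.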
6273 */
6274FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6275{
6276 if (IEM_IS_MODRM_REG_MODE(bRm))
6277 {
6278 /*
6279 * Register, immediate.
6280 */
6281 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6283
6284 IEM_MC_BEGIN(2, 0);
6285 IEM_MC_ARG(uint64_t *, pDst, 0);
6286 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6287 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6288 IEM_MC_PREPARE_FPU_USAGE();
6289 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6290 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6291 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6292 IEM_MC_FPU_TO_MMX_MODE();
6293 IEM_MC_ADVANCE_RIP();
6294 IEM_MC_END();
6295 }
6296 else
6297 {
6298 /*
6299 * Register, memory not supported.
6300 */
6301 /// @todo Caller already enforced register mode?!
6302 }
6303 return VINF_SUCCESS;
6304}
6305
6306
6307/**
6308 * Common worker for SSE2 instructions of the form:
6309 * psrlw xmm, imm8
6310 * psraw xmm, imm8
6311 * psllw xmm, imm8
6312 * psrld xmm, imm8
6313 * psrad xmm, imm8
6314 * pslld xmm, imm8
6315 * psrlq xmm, imm8
6316 * psllq xmm, imm8
6317 *
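 * Same decoding and register-only notes as for iemOpCommonMmx_Shift_Imm
 * above.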
6318 */
6319FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6320{
6321 if (IEM_IS_MODRM_REG_MODE(bRm))
6322 {
6323 /*
6324 * Register, immediate.
6325 */
6326 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6328
6329 IEM_MC_BEGIN(2, 0);
6330 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6331 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6333 IEM_MC_PREPARE_SSE_USAGE();
6334 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6335 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6336 IEM_MC_ADVANCE_RIP();
6337 IEM_MC_END();
6338 }
6339 else
6340 {
6341 /*
6342 * Register, memory not supported.
6343 */
6344 /// @todo Caller already enforced register mode?!
6345 }
6346 return VINF_SUCCESS;
6347}
6348
6349
6350/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6351FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6352{
6353// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6354 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6355}
6356
6357
6358/** Opcode 0x66 0x0f 0x71 11/2. */
6359FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6360{
6361// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6362 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6363}
6364
6365
6366/** Opcode 0x0f 0x71 11/4. */
6367FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6368{
6369// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6370 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6371}
6372
6373
6374/** Opcode 0x66 0x0f 0x71 11/4. */
6375FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6376{
6377// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6378 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6379}
6380
6381
6382/** Opcode 0x0f 0x71 11/6. */
6383FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6384{
6385// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6386 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6387}
6388
6389
6390/** Opcode 0x66 0x0f 0x71 11/6. */
6391FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6392{
6393// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6394 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6395}
6396
6397
6398/**
6399 * Group 12 jump table for register variant.
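 *
 * Each /r value has four entries indexed by the mandatory prefix (none,
 * 0x66, 0xf3, 0xf2) via pVCpu->iem.s.idxPrefix, matching the dispatch
 * expression in iemOp_Grp12 below.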
6400 */
6401IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6402{
6403 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6404 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6405 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6406 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6407 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6408 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6409 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6410 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6411};
6412AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6413
6414
6415/** Opcode 0x0f 0x71. */
6416FNIEMOP_DEF(iemOp_Grp12)
6417{
6418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6419 if (IEM_IS_MODRM_REG_MODE(bRm))
6420 /* register, register */
6421 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6422 + pVCpu->iem.s.idxPrefix], bRm);
6423 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6424}
6425
6426
6427/** Opcode 0x0f 0x72 11/2. */
6428FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6429{
6430// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6431 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6432}
6433
6434
6435/** Opcode 0x66 0x0f 0x72 11/2. */
6436FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6437{
6438// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6439 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6440}
6441
6442
6443/** Opcode 0x0f 0x72 11/4. */
6444FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6445{
6446// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6447 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6448}
6449
6450
6451/** Opcode 0x66 0x0f 0x72 11/4. */
6452FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6453{
6454// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6455 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6456}
6457
6458
6459/** Opcode 0x0f 0x72 11/6. */
6460FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6461{
6462// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6463 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6464}
6465
6466/** Opcode 0x66 0x0f 0x72 11/6. */
6467FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6468{
6469// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6470 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6471}
6472
6473
6474/**
6475 * Group 13 jump table for register variant.
6476 */
6477IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6478{
6479 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6480 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6481 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6482 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6483 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6484 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6485 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6486 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6487};
6488AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6489
6490/** Opcode 0x0f 0x72. */
6491FNIEMOP_DEF(iemOp_Grp13)
6492{
6493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6494 if (IEM_IS_MODRM_REG_MODE(bRm))
6495 /* register, register */
6496 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6497 + pVCpu->iem.s.idxPrefix], bRm);
6498 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6499}
6500
6501
6502/** Opcode 0x0f 0x73 11/2. */
6503FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6504{
6505// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6506 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6507}
6508
6509
6510/** Opcode 0x66 0x0f 0x73 11/2. */
6511FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6512{
6513// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6514 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6515}
6516
6517
6518/** Opcode 0x66 0x0f 0x73 11/3. */
6519FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6520{
6521// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6522 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6523}
6524
6525
6526/** Opcode 0x0f 0x73 11/6. */
6527FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6528{
6529// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6530 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6531}
6532
6533
6534/** Opcode 0x66 0x0f 0x73 11/6. */
6535FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6536{
6537// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6538 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6539}
6540
6541
6542/** Opcode 0x66 0x0f 0x73 11/7. */
6543FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6544{
6545// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6546 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6547}
6548
6549/**
6550 * Group 14 jump table for register variant.
6551 */
6552IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6553{
6554 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6555 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6556 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6557 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6558 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6559 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6560 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6561 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6562};
6563AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6564
6565
6566/** Opcode 0x0f 0x73. */
6567FNIEMOP_DEF(iemOp_Grp14)
6568{
6569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6570 if (IEM_IS_MODRM_REG_MODE(bRm))
6571 /* register, register */
6572 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6573 + pVCpu->iem.s.idxPrefix], bRm);
6574 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6575}
6576
6577
6578/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6579FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6580{
6581 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6582 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6583}
6584
6585
6586/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6587FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6588{
6589 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6590 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6591}
6592
6593
6594/* Opcode 0xf3 0x0f 0x74 - invalid */
6595/* Opcode 0xf2 0x0f 0x74 - invalid */
6596
6597
6598/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6599FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6600{
6601 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6602 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6603}
6604
6605
6606/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6607FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6608{
6609 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6610 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6611}
6612
6613
6614/* Opcode 0xf3 0x0f 0x75 - invalid */
6615/* Opcode 0xf2 0x0f 0x75 - invalid */
6616
6617
6618/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6619FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6620{
6621 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6622 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6623}
6624
6625
6626/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6627FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6628{
6629 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6630 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6631}
6632
6633
6634/* Opcode 0xf3 0x0f 0x76 - invalid */
6635/* Opcode 0xf2 0x0f 0x76 - invalid */
6636
6637
6638/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6639FNIEMOP_DEF(iemOp_emms)
6640{
6641 IEMOP_MNEMONIC(emms, "emms");
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643
6644 IEM_MC_BEGIN(0, 0);
6645 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6646 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6647 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
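 /* EMMS marks all eight x87/MMX registers as empty, leaving MMX mode. */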
6648 IEM_MC_FPU_FROM_MMX_MODE();
6649 IEM_MC_ADVANCE_RIP();
6650 IEM_MC_END();
6651 return VINF_SUCCESS;
6652}
6653
6654/* Opcode 0x66 0x0f 0x77 - invalid */
6655/* Opcode 0xf3 0x0f 0x77 - invalid */
6656/* Opcode 0xf2 0x0f 0x77 - invalid */
6657
6658/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6659#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6660FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6661{
6662 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6663 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6664 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
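 /* The effective operand size is fixed: 64-bit in long mode, 32-bit otherwise. */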
6665 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6666
6667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6668 if (IEM_IS_MODRM_REG_MODE(bRm))
6669 {
6670 /*
6671 * Register, register.
6672 */
6673 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6674 if (enmEffOpSize == IEMMODE_64BIT)
6675 {
6676 IEM_MC_BEGIN(2, 0);
6677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6678 IEM_MC_ARG(uint64_t, u64Enc, 1);
6679 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6680 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6681 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6682 IEM_MC_END();
6683 }
6684 else
6685 {
6686 IEM_MC_BEGIN(2, 0);
6687 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6688 IEM_MC_ARG(uint32_t, u32Enc, 1);
6689 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6690 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6691 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
6692 IEM_MC_END();
6693 }
6694 }
6695 else
6696 {
6697 /*
6698 * Memory, register.
6699 */
6700 if (enmEffOpSize == IEMMODE_64BIT)
6701 {
6702 IEM_MC_BEGIN(3, 0);
6703 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6704 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6705 IEM_MC_ARG(uint64_t, u64Enc, 2);
6706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6707 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6708 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6709 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6710 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6711 IEM_MC_END();
6712 }
6713 else
6714 {
6715 IEM_MC_BEGIN(3, 0);
6716 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6717 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6718 IEM_MC_ARG(uint32_t, u32Enc, 2);
6719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6720 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6721 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6722 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6723 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6724 IEM_MC_END();
6725 }
6726 }
6727 return VINF_SUCCESS;
6728}
6729#else
6730FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
6731#endif
6732
6733/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6734FNIEMOP_STUB(iemOp_AmdGrp17);
6735/* Opcode 0xf3 0x0f 0x78 - invalid */
6736/* Opcode 0xf2 0x0f 0x78 - invalid */
6737
6738/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6739#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6740FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6741{
6742 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6743 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6744 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
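 /* Same fixed operand size as vmread: 64-bit in long mode, 32-bit otherwise. */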
6745 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6746
6747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6748 if (IEM_IS_MODRM_REG_MODE(bRm))
6749 {
6750 /*
6751 * Register, register.
6752 */
6753 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6754 if (enmEffOpSize == IEMMODE_64BIT)
6755 {
6756 IEM_MC_BEGIN(2, 0);
6757 IEM_MC_ARG(uint64_t, u64Val, 0);
6758 IEM_MC_ARG(uint64_t, u64Enc, 1);
6759 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6760 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6761 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
6762 IEM_MC_END();
6763 }
6764 else
6765 {
6766 IEM_MC_BEGIN(2, 0);
6767 IEM_MC_ARG(uint32_t, u32Val, 0);
6768 IEM_MC_ARG(uint32_t, u32Enc, 1);
6769 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6770 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6771 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
6772 IEM_MC_END();
6773 }
6774 }
6775 else
6776 {
6777 /*
6778 * Register, memory.
6779 */
6780 if (enmEffOpSize == IEMMODE_64BIT)
6781 {
6782 IEM_MC_BEGIN(3, 0);
6783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6784 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6785 IEM_MC_ARG(uint64_t, u64Enc, 2);
6786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6787 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6788 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6790 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
6791 IEM_MC_END();
6792 }
6793 else
6794 {
6795 IEM_MC_BEGIN(3, 0);
6796 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6797 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6798 IEM_MC_ARG(uint32_t, u32Enc, 2);
6799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6800 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6801 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6802 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6803 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
6804 IEM_MC_END();
6805 }
6806 }
6807 return VINF_SUCCESS;
6808}
6809#else
6810FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
6811#endif
6812/* Opcode 0x66 0x0f 0x79 - invalid */
6813/* Opcode 0xf3 0x0f 0x79 - invalid */
6814/* Opcode 0xf2 0x0f 0x79 - invalid */
6815
6816/* Opcode 0x0f 0x7a - invalid */
6817/* Opcode 0x66 0x0f 0x7a - invalid */
6818/* Opcode 0xf3 0x0f 0x7a - invalid */
6819/* Opcode 0xf2 0x0f 0x7a - invalid */
6820
6821/* Opcode 0x0f 0x7b - invalid */
6822/* Opcode 0x66 0x0f 0x7b - invalid */
6823/* Opcode 0xf3 0x0f 0x7b - invalid */
6824/* Opcode 0xf2 0x0f 0x7b - invalid */
6825
6826/* Opcode 0x0f 0x7c - invalid */
6827
6828
6829/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
6830FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
6831{
6832 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6833 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
6834}
6835
6836
6837/* Opcode 0xf3 0x0f 0x7c - invalid */
6838
6839
6840/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
6841FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
6842{
6843 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6844 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
6845}
6846
6847
6848/* Opcode 0x0f 0x7d - invalid */
6849
6850
6851/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
6852FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
6853{
6854 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6855 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
6856}
6857
6858
6859/* Opcode 0xf3 0x0f 0x7d - invalid */
6860
6861
6862/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
6863FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
6864{
6865 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6866 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
6867}
6868
6869
6870/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
6871FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
6872{
6873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6875 {
6876 /**
6877 * @opcode 0x7e
6878 * @opcodesub rex.w=1
6879 * @oppfx none
6880 * @opcpuid mmx
6881 * @opgroup og_mmx_datamove
6882 * @opxcpttype 5
6883 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6884 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6885 */
6886 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6887 if (IEM_IS_MODRM_REG_MODE(bRm))
6888 {
6889 /* greg64, MMX */
6890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6891 IEM_MC_BEGIN(0, 1);
6892 IEM_MC_LOCAL(uint64_t, u64Tmp);
6893
6894 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6895 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6896
6897 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6898 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
6899 IEM_MC_FPU_TO_MMX_MODE();
6900
6901 IEM_MC_ADVANCE_RIP();
6902 IEM_MC_END();
6903 }
6904 else
6905 {
6906 /* [mem64], MMX */
6907 IEM_MC_BEGIN(0, 2);
6908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6909 IEM_MC_LOCAL(uint64_t, u64Tmp);
6910
6911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6913 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6914 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6915
6916 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6917 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
6918 IEM_MC_FPU_TO_MMX_MODE();
6919
6920 IEM_MC_ADVANCE_RIP();
6921 IEM_MC_END();
6922 }
6923 }
6924 else
6925 {
6926 /**
6927 * @opdone
6928 * @opcode 0x7e
6929 * @opcodesub rex.w=0
6930 * @oppfx none
6931 * @opcpuid mmx
6932 * @opgroup og_mmx_datamove
6933 * @opxcpttype 5
6934 * @opfunction iemOp_movd_q_Ey_Pd
6935 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6936 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6937 */
6938 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6939 if (IEM_IS_MODRM_REG_MODE(bRm))
6940 {
6941 /* greg32, MMX */
6942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6943 IEM_MC_BEGIN(0, 1);
6944 IEM_MC_LOCAL(uint32_t, u32Tmp);
6945
6946 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6947 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6948
6949 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6950 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
6951 IEM_MC_FPU_TO_MMX_MODE();
6952
6953 IEM_MC_ADVANCE_RIP();
6954 IEM_MC_END();
6955 }
6956 else
6957 {
6958 /* [mem32], MMX */
6959 IEM_MC_BEGIN(0, 2);
6960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6961 IEM_MC_LOCAL(uint32_t, u32Tmp);
6962
6963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6965 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6966 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6967
6968 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6969 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
6970 IEM_MC_FPU_TO_MMX_MODE();
6971
6972 IEM_MC_ADVANCE_RIP();
6973 IEM_MC_END();
6974 }
6975 }
6976 return VINF_SUCCESS;
6977
6978}
6979
6980
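/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */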
6981FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
6982{
6983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6984 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6985 {
6986 /**
6987 * @opcode 0x7e
6988 * @opcodesub rex.w=1
6989 * @oppfx 0x66
6990 * @opcpuid sse2
6991 * @opgroup og_sse2_simdint_datamove
6992 * @opxcpttype 5
6993 * @optest 64-bit / op1=1 op2=2 -> op1=2
6994 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6995 */
6996 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6997 if (IEM_IS_MODRM_REG_MODE(bRm))
6998 {
6999 /* greg64, XMM */
7000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7001 IEM_MC_BEGIN(0, 1);
7002 IEM_MC_LOCAL(uint64_t, u64Tmp);
7003
7004 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7005 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7006
7007 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7008 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7009
7010 IEM_MC_ADVANCE_RIP();
7011 IEM_MC_END();
7012 }
7013 else
7014 {
7015 /* [mem64], XMM */
7016 IEM_MC_BEGIN(0, 2);
7017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7018 IEM_MC_LOCAL(uint64_t, u64Tmp);
7019
7020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7022 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7023 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7024
7025 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7026 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7027
7028 IEM_MC_ADVANCE_RIP();
7029 IEM_MC_END();
7030 }
7031 }
7032 else
7033 {
7034 /**
7035 * @opdone
7036 * @opcode 0x7e
7037 * @opcodesub rex.w=0
7038 * @oppfx 0x66
7039 * @opcpuid sse2
7040 * @opgroup og_sse2_simdint_datamove
7041 * @opxcpttype 5
7042 * @opfunction iemOp_movd_q_Ey_Vy
7043 * @optest op1=1 op2=2 -> op1=2
7044 * @optest op1=0 op2=-42 -> op1=-42
7045 */
7046 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7047 if (IEM_IS_MODRM_REG_MODE(bRm))
7048 {
7049 /* greg32, XMM */
7050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7051 IEM_MC_BEGIN(0, 1);
7052 IEM_MC_LOCAL(uint32_t, u32Tmp);
7053
7054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7056
7057 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7058 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7059
7060 IEM_MC_ADVANCE_RIP();
7061 IEM_MC_END();
7062 }
7063 else
7064 {
7065 /* [mem32], XMM */
7066 IEM_MC_BEGIN(0, 2);
7067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7068 IEM_MC_LOCAL(uint32_t, u32Tmp);
7069
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7073 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7074
7075 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7076 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7077
7078 IEM_MC_ADVANCE_RIP();
7079 IEM_MC_END();
7080 }
7081 }
7082 return VINF_SUCCESS;
7083
7084}
7085
7086/**
7087 * @opcode 0x7e
7088 * @oppfx 0xf3
7089 * @opcpuid sse2
7090 * @opgroup og_sse2_pcksclr_datamove
7091 * @opxcpttype none
7092 * @optest op1=1 op2=2 -> op1=2
7093 * @optest op1=0 op2=-42 -> op1=-42
7094 */
7095FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7096{
7097 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 if (IEM_IS_MODRM_REG_MODE(bRm))
7100 {
7101 /*
7102 * Register, register.
7103 */
7104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7105 IEM_MC_BEGIN(0, 2);
7106 IEM_MC_LOCAL(uint64_t, uSrc);
7107
7108 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7109 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7110
7111 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7112 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7113
7114 IEM_MC_ADVANCE_RIP();
7115 IEM_MC_END();
7116 }
7117 else
7118 {
7119 /*
7120 * Register, memory.
7121 */
7122 IEM_MC_BEGIN(0, 2);
7123 IEM_MC_LOCAL(uint64_t, uSrc);
7124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7125
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7130
7131 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7132 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7133
7134 IEM_MC_ADVANCE_RIP();
7135 IEM_MC_END();
7136 }
7137 return VINF_SUCCESS;
7138}
7139
7140/* Opcode 0xf2 0x0f 0x7e - invalid */
7141
7142
7143/** Opcode 0x0f 0x7f - movq Qq, Pq */
7144FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7145{
7146 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7148 if (IEM_IS_MODRM_REG_MODE(bRm))
7149 {
7150 /*
7151 * Register, register.
7152 */
7153 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7154 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7156 IEM_MC_BEGIN(0, 1);
7157 IEM_MC_LOCAL(uint64_t, u64Tmp);
7158 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7159 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7160 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7161 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7162 IEM_MC_FPU_TO_MMX_MODE();
7163 IEM_MC_ADVANCE_RIP();
7164 IEM_MC_END();
7165 }
7166 else
7167 {
7168 /*
7169 * Memory, register.
7170 */
7171 IEM_MC_BEGIN(0, 2);
7172 IEM_MC_LOCAL(uint64_t, u64Tmp);
7173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7174
7175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7177 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7178 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7179
7180 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7181 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7182 IEM_MC_FPU_TO_MMX_MODE();
7183
7184 IEM_MC_ADVANCE_RIP();
7185 IEM_MC_END();
7186 }
7187 return VINF_SUCCESS;
7188}
7189
7190/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7191FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7192{
7193 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7195 if (IEM_IS_MODRM_REG_MODE(bRm))
7196 {
7197 /*
7198 * Register, register.
7199 */
7200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7201 IEM_MC_BEGIN(0, 0);
7202 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7204 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7205 IEM_GET_MODRM_REG(pVCpu, bRm));
7206 IEM_MC_ADVANCE_RIP();
7207 IEM_MC_END();
7208 }
7209 else
7210 {
7211 /*
7212 * Memory, register.
7213 */
7214 IEM_MC_BEGIN(0, 2);
7215 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7217
7218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7220 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7221 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7222
7223 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7224 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7225
7226 IEM_MC_ADVANCE_RIP();
7227 IEM_MC_END();
7228 }
7229 return VINF_SUCCESS;
7230}
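
/*
 * The aligned/unaligned split between this and the movdqu variant below
 * boils down to the store MC used: IEM_MC_STORE_MEM_U128_ALIGN_SSE faults
 * on a misaligned 16-byte access, whereas movdqu's plain
 * IEM_MC_STORE_MEM_U128 does not care:
 *
 *      movdqa [unaligned16], xmm0   ; #GP
 *      movdqu [unaligned16], xmm0   ; fine
 */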
7231
7232/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7233FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7234{
7235 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7237 if (IEM_IS_MODRM_REG_MODE(bRm))
7238 {
7239 /*
7240 * Register, register.
7241 */
7242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7243 IEM_MC_BEGIN(0, 0);
7244 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7245 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7246 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7247 IEM_GET_MODRM_REG(pVCpu, bRm));
7248 IEM_MC_ADVANCE_RIP();
7249 IEM_MC_END();
7250 }
7251 else
7252 {
7253 /*
7254 * Memory, register.
7255 */
7256 IEM_MC_BEGIN(0, 2);
7257 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7259
7260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7264
7265 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7266 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7267
7268 IEM_MC_ADVANCE_RIP();
7269 IEM_MC_END();
7270 }
7271 return VINF_SUCCESS;
7272}
7273
7274/* Opcode 0xf2 0x0f 0x7f - invalid */
7275
7276
7277
7278/** Opcode 0x0f 0x80. */
7279FNIEMOP_DEF(iemOp_jo_Jv)
7280{
7281 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7282 IEMOP_HLP_MIN_386();
7283 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7284 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7285 {
7286 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7288
7289 IEM_MC_BEGIN(0, 0);
7290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7291 IEM_MC_REL_JMP_S16(i16Imm);
7292 } IEM_MC_ELSE() {
7293 IEM_MC_ADVANCE_RIP();
7294 } IEM_MC_ENDIF();
7295 IEM_MC_END();
7296 }
7297 else
7298 {
7299 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7301
7302 IEM_MC_BEGIN(0, 0);
7303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7304 IEM_MC_REL_JMP_S32(i32Imm);
7305 } IEM_MC_ELSE() {
7306 IEM_MC_ADVANCE_RIP();
7307 } IEM_MC_ENDIF();
7308 IEM_MC_END();
7309 }
7310 return VINF_SUCCESS;
7311}
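
/*
 * The remaining 0x0f 0x80..0x8f long Jcc forms below all follow the
 * template above, only the tested EFLAGS condition differs. Roughly:
 *
 *      if (condition)  rIP += cbInstr + imm;   // imm is rel16 or rel32
 *      else            rIP += cbInstr;         // fall through
 *
 * On the 16-bit operand size path the resulting IP is truncated to 16
 * bits, which is the IEM_MC_REL_JMP_S16 flavour's job.
 */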
7312
7313
7314/** Opcode 0x0f 0x81. */
7315FNIEMOP_DEF(iemOp_jno_Jv)
7316{
7317 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7318 IEMOP_HLP_MIN_386();
7319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7320 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7321 {
7322 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324
7325 IEM_MC_BEGIN(0, 0);
7326 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7327 IEM_MC_ADVANCE_RIP();
7328 } IEM_MC_ELSE() {
7329 IEM_MC_REL_JMP_S16(i16Imm);
7330 } IEM_MC_ENDIF();
7331 IEM_MC_END();
7332 }
7333 else
7334 {
7335 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337
7338 IEM_MC_BEGIN(0, 0);
7339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7340 IEM_MC_ADVANCE_RIP();
7341 } IEM_MC_ELSE() {
7342 IEM_MC_REL_JMP_S32(i32Imm);
7343 } IEM_MC_ENDIF();
7344 IEM_MC_END();
7345 }
7346 return VINF_SUCCESS;
7347}
7348
7349
7350/** Opcode 0x0f 0x82. */
7351FNIEMOP_DEF(iemOp_jc_Jv)
7352{
7353 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7354 IEMOP_HLP_MIN_386();
7355 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7356 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7357 {
7358 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7360
7361 IEM_MC_BEGIN(0, 0);
7362 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7363 IEM_MC_REL_JMP_S16(i16Imm);
7364 } IEM_MC_ELSE() {
7365 IEM_MC_ADVANCE_RIP();
7366 } IEM_MC_ENDIF();
7367 IEM_MC_END();
7368 }
7369 else
7370 {
7371 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7373
7374 IEM_MC_BEGIN(0, 0);
7375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7376 IEM_MC_REL_JMP_S32(i32Imm);
7377 } IEM_MC_ELSE() {
7378 IEM_MC_ADVANCE_RIP();
7379 } IEM_MC_ENDIF();
7380 IEM_MC_END();
7381 }
7382 return VINF_SUCCESS;
7383}
7384
7385
7386/** Opcode 0x0f 0x83. */
7387FNIEMOP_DEF(iemOp_jnc_Jv)
7388{
7389 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7390 IEMOP_HLP_MIN_386();
7391 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7392 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7393 {
7394 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7396
7397 IEM_MC_BEGIN(0, 0);
7398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7399 IEM_MC_ADVANCE_RIP();
7400 } IEM_MC_ELSE() {
7401 IEM_MC_REL_JMP_S16(i16Imm);
7402 } IEM_MC_ENDIF();
7403 IEM_MC_END();
7404 }
7405 else
7406 {
7407 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7409
7410 IEM_MC_BEGIN(0, 0);
7411 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7412 IEM_MC_ADVANCE_RIP();
7413 } IEM_MC_ELSE() {
7414 IEM_MC_REL_JMP_S32(i32Imm);
7415 } IEM_MC_ENDIF();
7416 IEM_MC_END();
7417 }
7418 return VINF_SUCCESS;
7419}
7420
7421
7422/** Opcode 0x0f 0x84. */
7423FNIEMOP_DEF(iemOp_je_Jv)
7424{
7425 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7426 IEMOP_HLP_MIN_386();
7427 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7428 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7429 {
7430 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432
7433 IEM_MC_BEGIN(0, 0);
7434 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7435 IEM_MC_REL_JMP_S16(i16Imm);
7436 } IEM_MC_ELSE() {
7437 IEM_MC_ADVANCE_RIP();
7438 } IEM_MC_ENDIF();
7439 IEM_MC_END();
7440 }
7441 else
7442 {
7443 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7445
7446 IEM_MC_BEGIN(0, 0);
7447 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7448 IEM_MC_REL_JMP_S32(i32Imm);
7449 } IEM_MC_ELSE() {
7450 IEM_MC_ADVANCE_RIP();
7451 } IEM_MC_ENDIF();
7452 IEM_MC_END();
7453 }
7454 return VINF_SUCCESS;
7455}
7456
7457
7458/** Opcode 0x0f 0x85. */
7459FNIEMOP_DEF(iemOp_jne_Jv)
7460{
7461 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7462 IEMOP_HLP_MIN_386();
7463 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7464 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7465 {
7466 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7468
7469 IEM_MC_BEGIN(0, 0);
7470 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7471 IEM_MC_ADVANCE_RIP();
7472 } IEM_MC_ELSE() {
7473 IEM_MC_REL_JMP_S16(i16Imm);
7474 } IEM_MC_ENDIF();
7475 IEM_MC_END();
7476 }
7477 else
7478 {
7479 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7481
7482 IEM_MC_BEGIN(0, 0);
7483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7484 IEM_MC_ADVANCE_RIP();
7485 } IEM_MC_ELSE() {
7486 IEM_MC_REL_JMP_S32(i32Imm);
7487 } IEM_MC_ENDIF();
7488 IEM_MC_END();
7489 }
7490 return VINF_SUCCESS;
7491}
7492
7493
7494/** Opcode 0x0f 0x86. */
7495FNIEMOP_DEF(iemOp_jbe_Jv)
7496{
7497 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7498 IEMOP_HLP_MIN_386();
7499 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7500 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7501 {
7502 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7504
7505 IEM_MC_BEGIN(0, 0);
7506 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7507 IEM_MC_REL_JMP_S16(i16Imm);
7508 } IEM_MC_ELSE() {
7509 IEM_MC_ADVANCE_RIP();
7510 } IEM_MC_ENDIF();
7511 IEM_MC_END();
7512 }
7513 else
7514 {
7515 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7517
7518 IEM_MC_BEGIN(0, 0);
7519 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7520 IEM_MC_REL_JMP_S32(i32Imm);
7521 } IEM_MC_ELSE() {
7522 IEM_MC_ADVANCE_RIP();
7523 } IEM_MC_ENDIF();
7524 IEM_MC_END();
7525 }
7526 return VINF_SUCCESS;
7527}
7528
7529
7530/** Opcode 0x0f 0x87. */
7531FNIEMOP_DEF(iemOp_jnbe_Jv)
7532{
7533 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7534 IEMOP_HLP_MIN_386();
7535 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7536 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7537 {
7538 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7540
7541 IEM_MC_BEGIN(0, 0);
7542 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7543 IEM_MC_ADVANCE_RIP();
7544 } IEM_MC_ELSE() {
7545 IEM_MC_REL_JMP_S16(i16Imm);
7546 } IEM_MC_ENDIF();
7547 IEM_MC_END();
7548 }
7549 else
7550 {
7551 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7553
7554 IEM_MC_BEGIN(0, 0);
7555 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7556 IEM_MC_ADVANCE_RIP();
7557 } IEM_MC_ELSE() {
7558 IEM_MC_REL_JMP_S32(i32Imm);
7559 } IEM_MC_ENDIF();
7560 IEM_MC_END();
7561 }
7562 return VINF_SUCCESS;
7563}
7564
7565
7566/** Opcode 0x0f 0x88. */
7567FNIEMOP_DEF(iemOp_js_Jv)
7568{
7569 IEMOP_MNEMONIC(js_Jv, "js Jv");
7570 IEMOP_HLP_MIN_386();
7571 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7572 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7573 {
7574 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7576
7577 IEM_MC_BEGIN(0, 0);
7578 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7579 IEM_MC_REL_JMP_S16(i16Imm);
7580 } IEM_MC_ELSE() {
7581 IEM_MC_ADVANCE_RIP();
7582 } IEM_MC_ENDIF();
7583 IEM_MC_END();
7584 }
7585 else
7586 {
7587 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7589
7590 IEM_MC_BEGIN(0, 0);
7591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7592 IEM_MC_REL_JMP_S32(i32Imm);
7593 } IEM_MC_ELSE() {
7594 IEM_MC_ADVANCE_RIP();
7595 } IEM_MC_ENDIF();
7596 IEM_MC_END();
7597 }
7598 return VINF_SUCCESS;
7599}
7600
7601
7602/** Opcode 0x0f 0x89. */
7603FNIEMOP_DEF(iemOp_jns_Jv)
7604{
7605 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7606 IEMOP_HLP_MIN_386();
7607 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7608 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7609 {
7610 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7612
7613 IEM_MC_BEGIN(0, 0);
7614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7615 IEM_MC_ADVANCE_RIP();
7616 } IEM_MC_ELSE() {
7617 IEM_MC_REL_JMP_S16(i16Imm);
7618 } IEM_MC_ENDIF();
7619 IEM_MC_END();
7620 }
7621 else
7622 {
7623 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7625
7626 IEM_MC_BEGIN(0, 0);
7627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7628 IEM_MC_ADVANCE_RIP();
7629 } IEM_MC_ELSE() {
7630 IEM_MC_REL_JMP_S32(i32Imm);
7631 } IEM_MC_ENDIF();
7632 IEM_MC_END();
7633 }
7634 return VINF_SUCCESS;
7635}
7636
7637
7638/** Opcode 0x0f 0x8a. */
7639FNIEMOP_DEF(iemOp_jp_Jv)
7640{
7641 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7642 IEMOP_HLP_MIN_386();
7643 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7644 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7645 {
7646 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7648
7649 IEM_MC_BEGIN(0, 0);
7650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7651 IEM_MC_REL_JMP_S16(i16Imm);
7652 } IEM_MC_ELSE() {
7653 IEM_MC_ADVANCE_RIP();
7654 } IEM_MC_ENDIF();
7655 IEM_MC_END();
7656 }
7657 else
7658 {
7659 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7661
7662 IEM_MC_BEGIN(0, 0);
7663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7664 IEM_MC_REL_JMP_S32(i32Imm);
7665 } IEM_MC_ELSE() {
7666 IEM_MC_ADVANCE_RIP();
7667 } IEM_MC_ENDIF();
7668 IEM_MC_END();
7669 }
7670 return VINF_SUCCESS;
7671}
7672
7673
7674/** Opcode 0x0f 0x8b. */
7675FNIEMOP_DEF(iemOp_jnp_Jv)
7676{
7677 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7678 IEMOP_HLP_MIN_386();
7679 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7680 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7681 {
7682 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7684
7685 IEM_MC_BEGIN(0, 0);
7686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7687 IEM_MC_ADVANCE_RIP();
7688 } IEM_MC_ELSE() {
7689 IEM_MC_REL_JMP_S16(i16Imm);
7690 } IEM_MC_ENDIF();
7691 IEM_MC_END();
7692 }
7693 else
7694 {
7695 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7697
7698 IEM_MC_BEGIN(0, 0);
7699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7700 IEM_MC_ADVANCE_RIP();
7701 } IEM_MC_ELSE() {
7702 IEM_MC_REL_JMP_S32(i32Imm);
7703 } IEM_MC_ENDIF();
7704 IEM_MC_END();
7705 }
7706 return VINF_SUCCESS;
7707}
7708
7709
7710/** Opcode 0x0f 0x8c. */
7711FNIEMOP_DEF(iemOp_jl_Jv)
7712{
7713 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7714 IEMOP_HLP_MIN_386();
7715 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7716 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7717 {
7718 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7720
7721 IEM_MC_BEGIN(0, 0);
7722 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7723 IEM_MC_REL_JMP_S16(i16Imm);
7724 } IEM_MC_ELSE() {
7725 IEM_MC_ADVANCE_RIP();
7726 } IEM_MC_ENDIF();
7727 IEM_MC_END();
7728 }
7729 else
7730 {
7731 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7733
7734 IEM_MC_BEGIN(0, 0);
7735 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7736 IEM_MC_REL_JMP_S32(i32Imm);
7737 } IEM_MC_ELSE() {
7738 IEM_MC_ADVANCE_RIP();
7739 } IEM_MC_ENDIF();
7740 IEM_MC_END();
7741 }
7742 return VINF_SUCCESS;
7743}
7744
7745
7746/** Opcode 0x0f 0x8d. */
7747FNIEMOP_DEF(iemOp_jnl_Jv)
7748{
7749 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7750 IEMOP_HLP_MIN_386();
7751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7752 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7753 {
7754 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7756
7757 IEM_MC_BEGIN(0, 0);
7758 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7759 IEM_MC_ADVANCE_RIP();
7760 } IEM_MC_ELSE() {
7761 IEM_MC_REL_JMP_S16(i16Imm);
7762 } IEM_MC_ENDIF();
7763 IEM_MC_END();
7764 }
7765 else
7766 {
7767 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7769
7770 IEM_MC_BEGIN(0, 0);
7771 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7772 IEM_MC_ADVANCE_RIP();
7773 } IEM_MC_ELSE() {
7774 IEM_MC_REL_JMP_S32(i32Imm);
7775 } IEM_MC_ENDIF();
7776 IEM_MC_END();
7777 }
7778 return VINF_SUCCESS;
7779}
7780
7781
7782/** Opcode 0x0f 0x8e. */
7783FNIEMOP_DEF(iemOp_jle_Jv)
7784{
7785 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7786 IEMOP_HLP_MIN_386();
7787 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7788 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7789 {
7790 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7792
7793 IEM_MC_BEGIN(0, 0);
7794 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7795 IEM_MC_REL_JMP_S16(i16Imm);
7796 } IEM_MC_ELSE() {
7797 IEM_MC_ADVANCE_RIP();
7798 } IEM_MC_ENDIF();
7799 IEM_MC_END();
7800 }
7801 else
7802 {
7803 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7805
7806 IEM_MC_BEGIN(0, 0);
7807 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7808 IEM_MC_REL_JMP_S32(i32Imm);
7809 } IEM_MC_ELSE() {
7810 IEM_MC_ADVANCE_RIP();
7811 } IEM_MC_ENDIF();
7812 IEM_MC_END();
7813 }
7814 return VINF_SUCCESS;
7815}
7816
7817
7818/** Opcode 0x0f 0x8f. */
7819FNIEMOP_DEF(iemOp_jnle_Jv)
7820{
7821 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7822 IEMOP_HLP_MIN_386();
7823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7824 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7825 {
7826 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7828
7829 IEM_MC_BEGIN(0, 0);
7830 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7831 IEM_MC_ADVANCE_RIP();
7832 } IEM_MC_ELSE() {
7833 IEM_MC_REL_JMP_S16(i16Imm);
7834 } IEM_MC_ENDIF();
7835 IEM_MC_END();
7836 }
7837 else
7838 {
7839 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7841
7842 IEM_MC_BEGIN(0, 0);
7843 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7844 IEM_MC_ADVANCE_RIP();
7845 } IEM_MC_ELSE() {
7846 IEM_MC_REL_JMP_S32(i32Imm);
7847 } IEM_MC_ENDIF();
7848 IEM_MC_END();
7849 }
7850 return VINF_SUCCESS;
7851}
7852
7853
7854/** Opcode 0x0f 0x90. */
7855FNIEMOP_DEF(iemOp_seto_Eb)
7856{
7857 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
7858 IEMOP_HLP_MIN_386();
7859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7860
7861 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7862 * any way. AMD says it's "unused", whatever that means. We're
7863 * ignoring for now. */
7864 if (IEM_IS_MODRM_REG_MODE(bRm))
7865 {
7866 /* register target */
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 IEM_MC_BEGIN(0, 0);
7869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7870 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7871 } IEM_MC_ELSE() {
7872 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7873 } IEM_MC_ENDIF();
7874 IEM_MC_ADVANCE_RIP();
7875 IEM_MC_END();
7876 }
7877 else
7878 {
7879 /* memory target */
7880 IEM_MC_BEGIN(0, 1);
7881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7884 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7885 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7886 } IEM_MC_ELSE() {
7887 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7888 } IEM_MC_ENDIF();
7889 IEM_MC_ADVANCE_RIP();
7890 IEM_MC_END();
7891 }
7892 return VINF_SUCCESS;
7893}
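
/*
 * The 0x0f 0x91..0x9f SETcc decoders below are clones of the template
 * above with the tested condition swapped; in effect:
 *
 *      dst8 = condition ? 1 : 0;    // only r/m8 is written, reg is ignored
 */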
7894
7895
7896/** Opcode 0x0f 0x91. */
7897FNIEMOP_DEF(iemOp_setno_Eb)
7898{
7899 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
7900 IEMOP_HLP_MIN_386();
7901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7902
7903 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7904 * any way. AMD says it's "unused", whatever that means. We're
7905 * ignoring for now. */
7906 if (IEM_IS_MODRM_REG_MODE(bRm))
7907 {
7908 /* register target */
7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7910 IEM_MC_BEGIN(0, 0);
7911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7912 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7913 } IEM_MC_ELSE() {
7914 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7915 } IEM_MC_ENDIF();
7916 IEM_MC_ADVANCE_RIP();
7917 IEM_MC_END();
7918 }
7919 else
7920 {
7921 /* memory target */
7922 IEM_MC_BEGIN(0, 1);
7923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7927 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7928 } IEM_MC_ELSE() {
7929 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7930 } IEM_MC_ENDIF();
7931 IEM_MC_ADVANCE_RIP();
7932 IEM_MC_END();
7933 }
7934 return VINF_SUCCESS;
7935}
7936
7937
7938/** Opcode 0x0f 0x92. */
7939FNIEMOP_DEF(iemOp_setc_Eb)
7940{
7941 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
7942 IEMOP_HLP_MIN_386();
7943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7944
7945 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7946 * any way. AMD says it's "unused", whatever that means. We're
7947 * ignoring for now. */
7948 if (IEM_IS_MODRM_REG_MODE(bRm))
7949 {
7950 /* register target */
7951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7952 IEM_MC_BEGIN(0, 0);
7953 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7954 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7955 } IEM_MC_ELSE() {
7956 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7957 } IEM_MC_ENDIF();
7958 IEM_MC_ADVANCE_RIP();
7959 IEM_MC_END();
7960 }
7961 else
7962 {
7963 /* memory target */
7964 IEM_MC_BEGIN(0, 1);
7965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7969 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7970 } IEM_MC_ELSE() {
7971 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7972 } IEM_MC_ENDIF();
7973 IEM_MC_ADVANCE_RIP();
7974 IEM_MC_END();
7975 }
7976 return VINF_SUCCESS;
7977}
7978
7979
7980/** Opcode 0x0f 0x93. */
7981FNIEMOP_DEF(iemOp_setnc_Eb)
7982{
7983 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
7984 IEMOP_HLP_MIN_386();
7985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7986
7987 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7988 * any way. AMD says it's "unused", whatever that means. We're
7989 * ignoring for now. */
7990 if (IEM_IS_MODRM_REG_MODE(bRm))
7991 {
7992 /* register target */
7993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7994 IEM_MC_BEGIN(0, 0);
7995 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7996 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7997 } IEM_MC_ELSE() {
7998 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7999 } IEM_MC_ENDIF();
8000 IEM_MC_ADVANCE_RIP();
8001 IEM_MC_END();
8002 }
8003 else
8004 {
8005 /* memory target */
8006 IEM_MC_BEGIN(0, 1);
8007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8011 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8012 } IEM_MC_ELSE() {
8013 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8014 } IEM_MC_ENDIF();
8015 IEM_MC_ADVANCE_RIP();
8016 IEM_MC_END();
8017 }
8018 return VINF_SUCCESS;
8019}
8020
8021
8022/** Opcode 0x0f 0x94. */
8023FNIEMOP_DEF(iemOp_sete_Eb)
8024{
8025 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8026 IEMOP_HLP_MIN_386();
8027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8028
8029 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8030 * any way. AMD says it's "unused", whatever that means. We're
8031 * ignoring for now. */
8032 if (IEM_IS_MODRM_REG_MODE(bRm))
8033 {
8034 /* register target */
8035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8036 IEM_MC_BEGIN(0, 0);
8037 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8038 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8039 } IEM_MC_ELSE() {
8040 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8041 } IEM_MC_ENDIF();
8042 IEM_MC_ADVANCE_RIP();
8043 IEM_MC_END();
8044 }
8045 else
8046 {
8047 /* memory target */
8048 IEM_MC_BEGIN(0, 1);
8049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8052 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8053 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8054 } IEM_MC_ELSE() {
8055 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8056 } IEM_MC_ENDIF();
8057 IEM_MC_ADVANCE_RIP();
8058 IEM_MC_END();
8059 }
8060 return VINF_SUCCESS;
8061}
8062
8063
8064/** Opcode 0x0f 0x95. */
8065FNIEMOP_DEF(iemOp_setne_Eb)
8066{
8067 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8068 IEMOP_HLP_MIN_386();
8069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8070
8071 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8072 * any way. AMD says it's "unused", whatever that means. We're
8073 * ignoring for now. */
8074 if (IEM_IS_MODRM_REG_MODE(bRm))
8075 {
8076 /* register target */
8077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8078 IEM_MC_BEGIN(0, 0);
8079 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8080 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8081 } IEM_MC_ELSE() {
8082 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8083 } IEM_MC_ENDIF();
8084 IEM_MC_ADVANCE_RIP();
8085 IEM_MC_END();
8086 }
8087 else
8088 {
8089 /* memory target */
8090 IEM_MC_BEGIN(0, 1);
8091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8095 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8096 } IEM_MC_ELSE() {
8097 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8098 } IEM_MC_ENDIF();
8099 IEM_MC_ADVANCE_RIP();
8100 IEM_MC_END();
8101 }
8102 return VINF_SUCCESS;
8103}
8104
8105
8106/** Opcode 0x0f 0x96. */
8107FNIEMOP_DEF(iemOp_setbe_Eb)
8108{
8109 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8110 IEMOP_HLP_MIN_386();
8111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8112
8113 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8114 * any way. AMD says it's "unused", whatever that means. We're
8115 * ignoring for now. */
8116 if (IEM_IS_MODRM_REG_MODE(bRm))
8117 {
8118 /* register target */
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_BEGIN(0, 0);
8121 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8122 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8123 } IEM_MC_ELSE() {
8124 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8125 } IEM_MC_ENDIF();
8126 IEM_MC_ADVANCE_RIP();
8127 IEM_MC_END();
8128 }
8129 else
8130 {
8131 /* memory target */
8132 IEM_MC_BEGIN(0, 1);
8133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8136 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8137 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8138 } IEM_MC_ELSE() {
8139 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8140 } IEM_MC_ENDIF();
8141 IEM_MC_ADVANCE_RIP();
8142 IEM_MC_END();
8143 }
8144 return VINF_SUCCESS;
8145}
8146
8147
8148/** Opcode 0x0f 0x97. */
8149FNIEMOP_DEF(iemOp_setnbe_Eb)
8150{
8151 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8152 IEMOP_HLP_MIN_386();
8153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8154
8155 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8156 * any way. AMD says it's "unused", whatever that means. We're
8157 * ignoring for now. */
8158 if (IEM_IS_MODRM_REG_MODE(bRm))
8159 {
8160 /* register target */
8161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8162 IEM_MC_BEGIN(0, 0);
8163 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8164 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8165 } IEM_MC_ELSE() {
8166 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8167 } IEM_MC_ENDIF();
8168 IEM_MC_ADVANCE_RIP();
8169 IEM_MC_END();
8170 }
8171 else
8172 {
8173 /* memory target */
8174 IEM_MC_BEGIN(0, 1);
8175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8178 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8179 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8180 } IEM_MC_ELSE() {
8181 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8182 } IEM_MC_ENDIF();
8183 IEM_MC_ADVANCE_RIP();
8184 IEM_MC_END();
8185 }
8186 return VINF_SUCCESS;
8187}
8188
8189
8190/** Opcode 0x0f 0x98. */
8191FNIEMOP_DEF(iemOp_sets_Eb)
8192{
8193 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8194 IEMOP_HLP_MIN_386();
8195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8196
8197 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8198 * any way. AMD says it's "unused", whatever that means. We're
8199 * ignoring for now. */
8200 if (IEM_IS_MODRM_REG_MODE(bRm))
8201 {
8202 /* register target */
8203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8204 IEM_MC_BEGIN(0, 0);
8205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8206 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8207 } IEM_MC_ELSE() {
8208 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8209 } IEM_MC_ENDIF();
8210 IEM_MC_ADVANCE_RIP();
8211 IEM_MC_END();
8212 }
8213 else
8214 {
8215 /* memory target */
8216 IEM_MC_BEGIN(0, 1);
8217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8220 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8221 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8222 } IEM_MC_ELSE() {
8223 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8224 } IEM_MC_ENDIF();
8225 IEM_MC_ADVANCE_RIP();
8226 IEM_MC_END();
8227 }
8228 return VINF_SUCCESS;
8229}
8230
8231
8232/** Opcode 0x0f 0x99. */
8233FNIEMOP_DEF(iemOp_setns_Eb)
8234{
8235 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8236 IEMOP_HLP_MIN_386();
8237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8238
8239 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8240 * any way. AMD says it's "unused", whatever that means. We're
8241 * ignoring for now. */
8242 if (IEM_IS_MODRM_REG_MODE(bRm))
8243 {
8244 /* register target */
8245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8246 IEM_MC_BEGIN(0, 0);
8247 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8248 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8249 } IEM_MC_ELSE() {
8250 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8251 } IEM_MC_ENDIF();
8252 IEM_MC_ADVANCE_RIP();
8253 IEM_MC_END();
8254 }
8255 else
8256 {
8257 /* memory target */
8258 IEM_MC_BEGIN(0, 1);
8259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8262 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8263 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8264 } IEM_MC_ELSE() {
8265 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8266 } IEM_MC_ENDIF();
8267 IEM_MC_ADVANCE_RIP();
8268 IEM_MC_END();
8269 }
8270 return VINF_SUCCESS;
8271}
8272
8273
8274/** Opcode 0x0f 0x9a. */
8275FNIEMOP_DEF(iemOp_setp_Eb)
8276{
8277 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8278 IEMOP_HLP_MIN_386();
8279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8280
8281 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8282 * any way. AMD says it's "unused", whatever that means. We're
8283 * ignoring for now. */
8284 if (IEM_IS_MODRM_REG_MODE(bRm))
8285 {
8286 /* register target */
8287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8288 IEM_MC_BEGIN(0, 0);
8289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8290 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8291 } IEM_MC_ELSE() {
8292 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8293 } IEM_MC_ENDIF();
8294 IEM_MC_ADVANCE_RIP();
8295 IEM_MC_END();
8296 }
8297 else
8298 {
8299 /* memory target */
8300 IEM_MC_BEGIN(0, 1);
8301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8304 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8305 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8306 } IEM_MC_ELSE() {
8307 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8308 } IEM_MC_ENDIF();
8309 IEM_MC_ADVANCE_RIP();
8310 IEM_MC_END();
8311 }
8312 return VINF_SUCCESS;
8313}
8314
8315
8316/** Opcode 0x0f 0x9b. */
8317FNIEMOP_DEF(iemOp_setnp_Eb)
8318{
8319 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8320 IEMOP_HLP_MIN_386();
8321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8322
8323 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8324 * any way. AMD says it's "unused", whatever that means. We're
8325 * ignoring for now. */
8326 if (IEM_IS_MODRM_REG_MODE(bRm))
8327 {
8328 /* register target */
8329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8330 IEM_MC_BEGIN(0, 0);
8331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8332 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8333 } IEM_MC_ELSE() {
8334 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8335 } IEM_MC_ENDIF();
8336 IEM_MC_ADVANCE_RIP();
8337 IEM_MC_END();
8338 }
8339 else
8340 {
8341 /* memory target */
8342 IEM_MC_BEGIN(0, 1);
8343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8346 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8347 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8348 } IEM_MC_ELSE() {
8349 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8350 } IEM_MC_ENDIF();
8351 IEM_MC_ADVANCE_RIP();
8352 IEM_MC_END();
8353 }
8354 return VINF_SUCCESS;
8355}
8356
8357
8358/** Opcode 0x0f 0x9c. */
8359FNIEMOP_DEF(iemOp_setl_Eb)
8360{
8361 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8362 IEMOP_HLP_MIN_386();
8363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8364
8365 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8366 * any way. AMD says it's "unused", whatever that means. We're
8367 * ignoring for now. */
8368 if (IEM_IS_MODRM_REG_MODE(bRm))
8369 {
8370 /* register target */
8371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8372 IEM_MC_BEGIN(0, 0);
8373 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8374 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8375 } IEM_MC_ELSE() {
8376 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8377 } IEM_MC_ENDIF();
8378 IEM_MC_ADVANCE_RIP();
8379 IEM_MC_END();
8380 }
8381 else
8382 {
8383 /* memory target */
8384 IEM_MC_BEGIN(0, 1);
8385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8388 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8389 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8390 } IEM_MC_ELSE() {
8391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8392 } IEM_MC_ENDIF();
8393 IEM_MC_ADVANCE_RIP();
8394 IEM_MC_END();
8395 }
8396 return VINF_SUCCESS;
8397}
8398
8399
8400/** Opcode 0x0f 0x9d. */
8401FNIEMOP_DEF(iemOp_setnl_Eb)
8402{
8403 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8404 IEMOP_HLP_MIN_386();
8405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8406
8407 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8408 * any way. AMD says it's "unused", whatever that means. We're
8409 * ignoring for now. */
8410 if (IEM_IS_MODRM_REG_MODE(bRm))
8411 {
8412 /* register target */
8413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8414 IEM_MC_BEGIN(0, 0);
8415 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8416 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8417 } IEM_MC_ELSE() {
8418 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8419 } IEM_MC_ENDIF();
8420 IEM_MC_ADVANCE_RIP();
8421 IEM_MC_END();
8422 }
8423 else
8424 {
8425 /* memory target */
8426 IEM_MC_BEGIN(0, 1);
8427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8430 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8431 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8432 } IEM_MC_ELSE() {
8433 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8434 } IEM_MC_ENDIF();
8435 IEM_MC_ADVANCE_RIP();
8436 IEM_MC_END();
8437 }
8438 return VINF_SUCCESS;
8439}
8440
8441
8442/** Opcode 0x0f 0x9e. */
8443FNIEMOP_DEF(iemOp_setle_Eb)
8444{
8445 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8446 IEMOP_HLP_MIN_386();
8447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8448
8449 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8450 * any way. AMD says it's "unused", whatever that means. We're
8451 * ignoring for now. */
8452 if (IEM_IS_MODRM_REG_MODE(bRm))
8453 {
8454 /* register target */
8455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8456 IEM_MC_BEGIN(0, 0);
8457 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8458 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8459 } IEM_MC_ELSE() {
8460 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8461 } IEM_MC_ENDIF();
8462 IEM_MC_ADVANCE_RIP();
8463 IEM_MC_END();
8464 }
8465 else
8466 {
8467 /* memory target */
8468 IEM_MC_BEGIN(0, 1);
8469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8472 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8473 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8474 } IEM_MC_ELSE() {
8475 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8476 } IEM_MC_ENDIF();
8477 IEM_MC_ADVANCE_RIP();
8478 IEM_MC_END();
8479 }
8480 return VINF_SUCCESS;
8481}
8482
8483
8484/** Opcode 0x0f 0x9f. */
8485FNIEMOP_DEF(iemOp_setnle_Eb)
8486{
8487 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8488 IEMOP_HLP_MIN_386();
8489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8490
8491 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8492 * any way. AMD says it's "unused", whatever that means. We're
8493 * ignoring for now. */
8494 if (IEM_IS_MODRM_REG_MODE(bRm))
8495 {
8496 /* register target */
8497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8498 IEM_MC_BEGIN(0, 0);
8499 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8500 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8501 } IEM_MC_ELSE() {
8502 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8503 } IEM_MC_ENDIF();
8504 IEM_MC_ADVANCE_RIP();
8505 IEM_MC_END();
8506 }
8507 else
8508 {
8509 /* memory target */
8510 IEM_MC_BEGIN(0, 1);
8511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8514 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8515 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8516 } IEM_MC_ELSE() {
8517 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8518 } IEM_MC_ENDIF();
8519 IEM_MC_ADVANCE_RIP();
8520 IEM_MC_END();
8521 }
8522 return VINF_SUCCESS;
8523}
8524
8525
8526/**
8527 * Common 'push segment-register' helper.
8528 */
8529FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8530{
8531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8532 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
8533 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8534
8535 switch (pVCpu->iem.s.enmEffOpSize)
8536 {
8537 case IEMMODE_16BIT:
8538 IEM_MC_BEGIN(0, 1);
8539 IEM_MC_LOCAL(uint16_t, u16Value);
8540 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8541 IEM_MC_PUSH_U16(u16Value);
8542 IEM_MC_ADVANCE_RIP();
8543 IEM_MC_END();
8544 break;
8545
8546 case IEMMODE_32BIT:
8547 IEM_MC_BEGIN(0, 1);
8548 IEM_MC_LOCAL(uint32_t, u32Value);
8549 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
8550 IEM_MC_PUSH_U32_SREG(u32Value);
8551 IEM_MC_ADVANCE_RIP();
8552 IEM_MC_END();
8553 break;
8554
8555 case IEMMODE_64BIT:
8556 IEM_MC_BEGIN(0, 1);
8557 IEM_MC_LOCAL(uint64_t, u64Value);
8558 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8559 IEM_MC_PUSH_U64(u64Value);
8560 IEM_MC_ADVANCE_RIP();
8561 IEM_MC_END();
8562 break;
8563 }
8564
8565 return VINF_SUCCESS;
8566}
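
/*
 * Note on the 32-bit case above: a dedicated IEM_MC_PUSH_U32_SREG is used
 * because real CPUs pushing a segment register with a 32-bit operand size
 * may do only a 16-bit write and leave the upper half of the stack slot
 * untouched, so a plain IEM_MC_PUSH_U32 would be too tidy here.
 */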
8567
8568
8569/** Opcode 0x0f 0xa0. */
8570FNIEMOP_DEF(iemOp_push_fs)
8571{
8572 IEMOP_MNEMONIC(push_fs, "push fs");
8573 IEMOP_HLP_MIN_386();
8574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8575 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8576}
8577
8578
8579/** Opcode 0x0f 0xa1. */
8580FNIEMOP_DEF(iemOp_pop_fs)
8581{
8582 IEMOP_MNEMONIC(pop_fs, "pop fs");
8583 IEMOP_HLP_MIN_386();
8584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8585 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8586}
8587
8588
8589/** Opcode 0x0f 0xa2. */
8590FNIEMOP_DEF(iemOp_cpuid)
8591{
8592 IEMOP_MNEMONIC(cpuid, "cpuid");
8593 IEMOP_HLP_MIN_486(); /* not all 486es. */
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8596}
8597
8598
8599/**
8600 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8601 * iemOp_bts_Ev_Gv.
8602 */
8603FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
8604{
8605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8606 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8607
8608 if (IEM_IS_MODRM_REG_MODE(bRm))
8609 {
8610 /* register destination. */
8611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8612 switch (pVCpu->iem.s.enmEffOpSize)
8613 {
8614 case IEMMODE_16BIT:
8615 IEM_MC_BEGIN(3, 0);
8616 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8617 IEM_MC_ARG(uint16_t, u16Src, 1);
8618 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8619
8620 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8621 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
8622 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8623 IEM_MC_REF_EFLAGS(pEFlags);
8624 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8625
8626 IEM_MC_ADVANCE_RIP();
8627 IEM_MC_END();
8628 return VINF_SUCCESS;
8629
8630 case IEMMODE_32BIT:
8631 IEM_MC_BEGIN(3, 0);
8632 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8633 IEM_MC_ARG(uint32_t, u32Src, 1);
8634 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8635
8636 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8637 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
8638 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8639 IEM_MC_REF_EFLAGS(pEFlags);
8640 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8641
8642 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8643 IEM_MC_ADVANCE_RIP();
8644 IEM_MC_END();
8645 return VINF_SUCCESS;
8646
8647 case IEMMODE_64BIT:
8648 IEM_MC_BEGIN(3, 0);
8649 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8650 IEM_MC_ARG(uint64_t, u64Src, 1);
8651 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8652
8653 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8654 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
8655 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8656 IEM_MC_REF_EFLAGS(pEFlags);
8657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8658
8659 IEM_MC_ADVANCE_RIP();
8660 IEM_MC_END();
8661 return VINF_SUCCESS;
8662
8663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8664 }
8665 }
8666 else
8667 {
8668 /* memory destination. */
8669
8670 uint32_t fAccess;
8671 if (pImpl->pfnLockedU16)
8672 fAccess = IEM_ACCESS_DATA_RW;
8673 else /* BT */
8674 fAccess = IEM_ACCESS_DATA_R;
8675
8676 /** @todo test negative bit offsets! */
8677 switch (pVCpu->iem.s.enmEffOpSize)
8678 {
8679 case IEMMODE_16BIT:
8680 IEM_MC_BEGIN(3, 2);
8681 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8682 IEM_MC_ARG(uint16_t, u16Src, 1);
8683 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8685 IEM_MC_LOCAL(int16_t, i16AddrAdj);
8686
8687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8688 if (pImpl->pfnLockedU16)
8689 IEMOP_HLP_DONE_DECODING();
8690 else
8691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8692 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8693 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
8694 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
8695 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
8696 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
8697 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
8698 IEM_MC_FETCH_EFLAGS(EFlags);
8699
8700 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8702 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8703 else
8704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8705 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8706
8707 IEM_MC_COMMIT_EFLAGS(EFlags);
8708 IEM_MC_ADVANCE_RIP();
8709 IEM_MC_END();
8710 return VINF_SUCCESS;
8711
8712 case IEMMODE_32BIT:
8713 IEM_MC_BEGIN(3, 2);
8714 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8715 IEM_MC_ARG(uint32_t, u32Src, 1);
8716 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8718 IEM_MC_LOCAL(int32_t, i32AddrAdj);
8719
8720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8721 if (pImpl->pfnLockedU16)
8722 IEMOP_HLP_DONE_DECODING();
8723 else
8724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8725 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8726 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
8727 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
8728 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
8729 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
8730 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
8731 IEM_MC_FETCH_EFLAGS(EFlags);
8732
8733 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8736 else
8737 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8739
8740 IEM_MC_COMMIT_EFLAGS(EFlags);
8741 IEM_MC_ADVANCE_RIP();
8742 IEM_MC_END();
8743 return VINF_SUCCESS;
8744
8745 case IEMMODE_64BIT:
8746 IEM_MC_BEGIN(3, 2);
8747 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8748 IEM_MC_ARG(uint64_t, u64Src, 1);
8749 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8751 IEM_MC_LOCAL(int64_t, i64AddrAdj);
8752
8753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8754 if (pImpl->pfnLockedU16)
8755 IEMOP_HLP_DONE_DECODING();
8756 else
8757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8758 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8759 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
8760 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
8761 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
8762 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
8763 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
8764 IEM_MC_FETCH_EFLAGS(EFlags);
8765
8766 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8767 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8768 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8769 else
8770 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8771 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8772
8773 IEM_MC_COMMIT_EFLAGS(EFlags);
8774 IEM_MC_ADVANCE_RIP();
8775 IEM_MC_END();
8776 return VINF_SUCCESS;
8777
8778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8779 }
8780 }
8781}
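
/*
 * Worked example of the address adjustment in the memory paths above: the
 * bit offset in the source register is signed and not limited to the
 * operand width, so e.g. with ax = 35
 *
 *      bt word [mem], ax
 *
 * tests bit 35 & 15 = 3 of the word at mem + (35 >> 4) * 2 = mem + 4,
 * which is exactly what the SAR-by-4 / SHL-by-1 sequence computes.
 */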
8782
8783
8784/** Opcode 0x0f 0xa3. */
8785FNIEMOP_DEF(iemOp_bt_Ev_Gv)
8786{
8787 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
8788 IEMOP_HLP_MIN_386();
8789 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
8790}
8791
8792
8793/**
8794 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
8795 */
8796FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
8797{
8798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8800
8801 if (IEM_IS_MODRM_REG_MODE(bRm))
8802 {
8803 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8805
8806 switch (pVCpu->iem.s.enmEffOpSize)
8807 {
8808 case IEMMODE_16BIT:
8809 IEM_MC_BEGIN(4, 0);
8810 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8811 IEM_MC_ARG(uint16_t, u16Src, 1);
8812 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8814
8815 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8816 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8817 IEM_MC_REF_EFLAGS(pEFlags);
8818 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8819
8820 IEM_MC_ADVANCE_RIP();
8821 IEM_MC_END();
8822 return VINF_SUCCESS;
8823
8824 case IEMMODE_32BIT:
8825 IEM_MC_BEGIN(4, 0);
8826 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8827 IEM_MC_ARG(uint32_t, u32Src, 1);
8828 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8829 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8830
8831 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8832 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8833 IEM_MC_REF_EFLAGS(pEFlags);
8834 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8835
8836 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8837 IEM_MC_ADVANCE_RIP();
8838 IEM_MC_END();
8839 return VINF_SUCCESS;
8840
8841 case IEMMODE_64BIT:
8842 IEM_MC_BEGIN(4, 0);
8843 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8844 IEM_MC_ARG(uint64_t, u64Src, 1);
8845 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8846 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8847
8848 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8849 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8850 IEM_MC_REF_EFLAGS(pEFlags);
8851 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8852
8853 IEM_MC_ADVANCE_RIP();
8854 IEM_MC_END();
8855 return VINF_SUCCESS;
8856
8857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8858 }
8859 }
8860 else
8861 {
8862 switch (pVCpu->iem.s.enmEffOpSize)
8863 {
8864 case IEMMODE_16BIT:
8865 IEM_MC_BEGIN(4, 2);
8866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8867 IEM_MC_ARG(uint16_t, u16Src, 1);
8868 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8869 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8871
8872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8873 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8874 IEM_MC_ASSIGN(cShiftArg, cShift);
8875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8876 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8877 IEM_MC_FETCH_EFLAGS(EFlags);
8878 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8879 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8880
8881 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8882 IEM_MC_COMMIT_EFLAGS(EFlags);
8883 IEM_MC_ADVANCE_RIP();
8884 IEM_MC_END();
8885 return VINF_SUCCESS;
8886
8887 case IEMMODE_32BIT:
8888 IEM_MC_BEGIN(4, 2);
8889 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8890 IEM_MC_ARG(uint32_t, u32Src, 1);
8891 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8892 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8894
8895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8896 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8897 IEM_MC_ASSIGN(cShiftArg, cShift);
8898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8899 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8900 IEM_MC_FETCH_EFLAGS(EFlags);
8901 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8902 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8903
8904 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8905 IEM_MC_COMMIT_EFLAGS(EFlags);
8906 IEM_MC_ADVANCE_RIP();
8907 IEM_MC_END();
8908 return VINF_SUCCESS;
8909
8910 case IEMMODE_64BIT:
8911 IEM_MC_BEGIN(4, 2);
8912 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8913 IEM_MC_ARG(uint64_t, u64Src, 1);
8914 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8915 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8917
8918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8919 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8920 IEM_MC_ASSIGN(cShiftArg, cShift);
8921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8922 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8923 IEM_MC_FETCH_EFLAGS(EFlags);
8924 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8925 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8926
8927 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8928 IEM_MC_COMMIT_EFLAGS(EFlags);
8929 IEM_MC_ADVANCE_RIP();
8930 IEM_MC_END();
8931 return VINF_SUCCESS;
8932
8933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8934 }
8935 }
8936}
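
/*
 * Decoding detail worth calling out: the memory paths above pass a
 * trailing '1' to IEM_MC_CALC_RM_EFF_ADDR to say that one immediate byte
 * (the shift count) still follows at that point, which matters for
 * RIP-relative operands. The operation itself, informally:
 *
 *      shld ax, bx, 4           ; ax = (ax << 4) | (bx >> 12)
 */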
8937
8938
8939/**
8940 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
8941 */
8942FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
8943{
8944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8945 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8946
8947 if (IEM_IS_MODRM_REG_MODE(bRm))
8948 {
8949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8950
8951 switch (pVCpu->iem.s.enmEffOpSize)
8952 {
8953 case IEMMODE_16BIT:
8954 IEM_MC_BEGIN(4, 0);
8955 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8956 IEM_MC_ARG(uint16_t, u16Src, 1);
8957 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8958 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8959
8960 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8961 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8962 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8963 IEM_MC_REF_EFLAGS(pEFlags);
8964 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8965
8966 IEM_MC_ADVANCE_RIP();
8967 IEM_MC_END();
8968 return VINF_SUCCESS;
8969
8970 case IEMMODE_32BIT:
8971 IEM_MC_BEGIN(4, 0);
8972 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8973 IEM_MC_ARG(uint32_t, u32Src, 1);
8974 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8975 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8976
8977 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8978 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8979 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8980 IEM_MC_REF_EFLAGS(pEFlags);
8981 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8982
8983 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8984 IEM_MC_ADVANCE_RIP();
8985 IEM_MC_END();
8986 return VINF_SUCCESS;
8987
8988 case IEMMODE_64BIT:
8989 IEM_MC_BEGIN(4, 0);
8990 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8991 IEM_MC_ARG(uint64_t, u64Src, 1);
8992 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8993 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8994
8995 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8996 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8997 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8998 IEM_MC_REF_EFLAGS(pEFlags);
8999 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9000
9001 IEM_MC_ADVANCE_RIP();
9002 IEM_MC_END();
9003 return VINF_SUCCESS;
9004
9005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9006 }
9007 }
9008 else
9009 {
9010 switch (pVCpu->iem.s.enmEffOpSize)
9011 {
9012 case IEMMODE_16BIT:
9013 IEM_MC_BEGIN(4, 2);
9014 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9015 IEM_MC_ARG(uint16_t, u16Src, 1);
9016 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9017 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9019
9020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9022 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9023 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9024 IEM_MC_FETCH_EFLAGS(EFlags);
9025 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9026 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9027
9028 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9029 IEM_MC_COMMIT_EFLAGS(EFlags);
9030 IEM_MC_ADVANCE_RIP();
9031 IEM_MC_END();
9032 return VINF_SUCCESS;
9033
9034 case IEMMODE_32BIT:
9035 IEM_MC_BEGIN(4, 2);
9036 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9037 IEM_MC_ARG(uint32_t, u32Src, 1);
9038 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9039 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9041
9042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9044 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9045 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9046 IEM_MC_FETCH_EFLAGS(EFlags);
9047 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9048 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9049
9050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9051 IEM_MC_COMMIT_EFLAGS(EFlags);
9052 IEM_MC_ADVANCE_RIP();
9053 IEM_MC_END();
9054 return VINF_SUCCESS;
9055
9056 case IEMMODE_64BIT:
9057 IEM_MC_BEGIN(4, 2);
9058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9059 IEM_MC_ARG(uint64_t, u64Src, 1);
9060 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9061 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9063
9064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9066 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9067 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9068 IEM_MC_FETCH_EFLAGS(EFlags);
9069 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9070 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9071
9072 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9073 IEM_MC_COMMIT_EFLAGS(EFlags);
9074 IEM_MC_ADVANCE_RIP();
9075 IEM_MC_END();
9076 return VINF_SUCCESS;
9077
9078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9079 }
9080 }
9081}
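
/*
 * Same as the immediate variant, except the count is fetched from CL at
 * execution time; architecturally the count is taken modulo the operand
 * width:
 *
 *      shrd eax, ebx, cl        ; count is masked to 0..31 first
 */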
9082
9083
9084
9085/** Opcode 0x0f 0xa4. */
9086FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9087{
9088 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9089 IEMOP_HLP_MIN_386();
9090 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9091}
9092
9093
9094/** Opcode 0x0f 0xa5. */
9095FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9096{
9097 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9098 IEMOP_HLP_MIN_386();
9099 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9100}
9101
9102
9103/** Opcode 0x0f 0xa8. */
9104FNIEMOP_DEF(iemOp_push_gs)
9105{
9106 IEMOP_MNEMONIC(push_gs, "push gs");
9107 IEMOP_HLP_MIN_386();
9108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9109 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9110}
9111
9112
9113/** Opcode 0x0f 0xa9. */
9114FNIEMOP_DEF(iemOp_pop_gs)
9115{
9116 IEMOP_MNEMONIC(pop_gs, "pop gs");
9117 IEMOP_HLP_MIN_386();
9118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9119 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9120}
9121
9122
9123/** Opcode 0x0f 0xaa. */
9124FNIEMOP_DEF(iemOp_rsm)
9125{
9126 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9127 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9129 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9130}
9131
9132
9133
9134/** Opcode 0x0f 0xab. */
9135FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9136{
9137 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9138 IEMOP_HLP_MIN_386();
9139 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9140}
9141
9142
9143/** Opcode 0x0f 0xac. */
9144FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9145{
9146 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9147 IEMOP_HLP_MIN_386();
9148 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9149}
9150
9151
9152/** Opcode 0x0f 0xad. */
9153FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9154{
9155 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9156 IEMOP_HLP_MIN_386();
9157 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9158}
9159
9160
9161/** Opcode 0x0f 0xae mem/0. */
9162FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9163{
9164 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9165 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9166 return IEMOP_RAISE_INVALID_OPCODE();
9167
9168 IEM_MC_BEGIN(3, 1);
9169 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9170 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9171 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9174 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9175 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9176 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9177 IEM_MC_END();
9178 return VINF_SUCCESS;
9179}
9180
9181
9182/** Opcode 0x0f 0xae mem/1. */
9183FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9184{
9185 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9186 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9187 return IEMOP_RAISE_INVALID_OPCODE();
9188
9189 IEM_MC_BEGIN(3, 1);
9190 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9191 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9192 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9195 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9196 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9197 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9198 IEM_MC_END();
9199 return VINF_SUCCESS;
9200}
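/*
 * Note the asymmetry between the two workers above: fxsave only reads the FPU
 * state and thus actualizes it for reading, whereas fxrstor replaces it and
 * actualizes it for change.
 */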
9201
9202
9203/**
9204 * @opmaps grp15
9205 * @opcode !11/2
9206 * @oppfx none
9207 * @opcpuid sse
9208 * @opgroup og_sse_mxcsrsm
9209 * @opxcpttype 5
9210 * @optest op1=0 -> mxcsr=0
9211 * @optest op1=0x2083 -> mxcsr=0x2083
9212 * @optest op1=0xfffffffe -> value.xcpt=0xd
9213 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9214 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9215 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9216 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9217 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9218 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9219 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9220 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
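 * @remarks Setting any reserved MXCSR bit raises \#GP(0), which is what the
 *          op1=0xfffffffe test above documents.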
9221 */
9222FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9223{
9224 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9225 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9226 return IEMOP_RAISE_INVALID_OPCODE();
9227
9228 IEM_MC_BEGIN(2, 0);
9229 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9230 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9233 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9234 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9235 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9236 IEM_MC_END();
9237 return VINF_SUCCESS;
9238}
9239
9240
9241/**
9242 * @opmaps grp15
9243 * @opcode !11/3
9244 * @oppfx none
9245 * @opcpuid sse
9246 * @opgroup og_sse_mxcsrsm
9247 * @opxcpttype 5
9248 * @optest mxcsr=0 -> op1=0
9249 * @optest mxcsr=0x2083 -> op1=0x2083
9250 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9251 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9252 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9253 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9254 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9255 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9256 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9257 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9258 */
9259FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9260{
9261 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9262 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9263 return IEMOP_RAISE_INVALID_OPCODE();
9264
9265 IEM_MC_BEGIN(2, 0);
9266 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9267 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9271 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9272 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9273 IEM_MC_END();
9274 return VINF_SUCCESS;
9275}
9276
9277
9278/**
9279 * @opmaps grp15
9280 * @opcode !11/4
9281 * @oppfx none
9282 * @opcpuid xsave
9283 * @opgroup og_system
9284 * @opxcpttype none
9285 */
9286FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9287{
9288 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9289 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9290 return IEMOP_RAISE_INVALID_OPCODE();
9291
9292 IEM_MC_BEGIN(3, 0);
9293 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9294 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9295 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9298 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9299 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9300 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9301 IEM_MC_END();
9302 return VINF_SUCCESS;
9303}
9304
9305
9306/**
9307 * @opmaps grp15
9308 * @opcode !11/5
9309 * @oppfx none
9310 * @opcpuid xsave
9311 * @opgroup og_system
9312 * @opxcpttype none
9313 */
9314FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9315{
9316 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9317 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9318 return IEMOP_RAISE_INVALID_OPCODE();
9319
9320 IEM_MC_BEGIN(3, 0);
9321 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9322 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9323 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9326 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor replaces the state, so actualize for change like fxrstor above. */
9327 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9328 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9329 IEM_MC_END();
9330 return VINF_SUCCESS;
9331}
9332
9333/** Opcode 0x0f 0xae mem/6. */
9334FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9335
9336/**
9337 * @opmaps grp15
9338 * @opcode !11/7
9339 * @oppfx none
9340 * @opcpuid clfsh
9341 * @opgroup og_cachectl
9342 * @optest op1=1 ->
9343 */
9344FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9345{
9346 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9347 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9348 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9349
9350 IEM_MC_BEGIN(2, 0);
9351 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9352 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9355 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9356 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9357 IEM_MC_END();
9358 return VINF_SUCCESS;
9359}
9360
9361/**
9362 * @opmaps grp15
9363 * @opcode !11/7
9364 * @oppfx 0x66
9365 * @opcpuid clflushopt
9366 * @opgroup og_cachectl
9367 * @optest op1=1 ->
9368 */
9369FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9370{
9371 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9372 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9373 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9374
9375 IEM_MC_BEGIN(2, 0);
9376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9377 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9380 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9381 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9382 IEM_MC_END();
9383 return VINF_SUCCESS;
9384}
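/*
 * Both CLFLUSH and CLFLUSHOPT forward to the same iemCImpl_clflush_clflushopt
 * worker; the two apparently differ only in their memory-ordering guarantees,
 * which the emulation has no need to distinguish.
 */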
9385
9386
9387/** Opcode 0x0f 0xae 11b/5. */
9388FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9389{
9390 RT_NOREF_PV(bRm);
9391 IEMOP_MNEMONIC(lfence, "lfence");
9392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9393 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9394 return IEMOP_RAISE_INVALID_OPCODE();
9395
9396 IEM_MC_BEGIN(0, 0);
9397#ifndef RT_ARCH_ARM64
9398 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9399#endif
9400 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9401#ifndef RT_ARCH_ARM64
9402 else
9403 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9404#endif
9405 IEM_MC_ADVANCE_RIP();
9406 IEM_MC_END();
9407 return VINF_SUCCESS;
9408}
9409
9410
9411/** Opcode 0x0f 0xae 11b/6. */
9412FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9413{
9414 RT_NOREF_PV(bRm);
9415 IEMOP_MNEMONIC(mfence, "mfence");
9416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9417 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9418 return IEMOP_RAISE_INVALID_OPCODE();
9419
9420 IEM_MC_BEGIN(0, 0);
9421#ifndef RT_ARCH_ARM64
9422 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9423#endif
9424 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9425#ifndef RT_ARCH_ARM64
9426 else
9427 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9428#endif
9429 IEM_MC_ADVANCE_RIP();
9430 IEM_MC_END();
9431 return VINF_SUCCESS;
9432}
9433
9434
9435/** Opcode 0x0f 0xae 11b/7. */
9436FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9437{
9438 RT_NOREF_PV(bRm);
9439 IEMOP_MNEMONIC(sfence, "sfence");
9440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9441 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9442 return IEMOP_RAISE_INVALID_OPCODE();
9443
9444 IEM_MC_BEGIN(0, 0);
9445#ifndef RT_ARCH_ARM64
9446 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9447#endif
9448 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9449#ifndef RT_ARCH_ARM64
9450 else
9451 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9452#endif
9453 IEM_MC_ADVANCE_RIP();
9454 IEM_MC_END();
9455 return VINF_SUCCESS;
9456}
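/*
 * A note on the RT_ARCH_ARM64 conditionals in the three fence workers above:
 * on x86/AMD64 hosts the host CPU may lack SSE2, in which case a generic
 * alternative memory fence is used instead of the native {l,m,s}fence helper.
 * On ARM64 that host-feature check is meaningless, so the helper (presumably
 * implemented with an equivalent barrier there) is called unconditionally.
 */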
9457
9458
9459/** Opcode 0xf3 0x0f 0xae 11b/0. */
9460FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9461{
9462 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9464 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9465 {
9466 IEM_MC_BEGIN(1, 0);
9467 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9468 IEM_MC_ARG(uint64_t, u64Dst, 0);
9469 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9470 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9471 IEM_MC_ADVANCE_RIP();
9472 IEM_MC_END();
9473 }
9474 else
9475 {
9476 IEM_MC_BEGIN(1, 0);
9477 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9478 IEM_MC_ARG(uint32_t, u32Dst, 0);
9479 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9481 IEM_MC_ADVANCE_RIP();
9482 IEM_MC_END();
9483 }
9484 return VINF_SUCCESS;
9485}
9486
9487
9488/** Opcode 0xf3 0x0f 0xae 11b/1. */
9489FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9490{
9491 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9493 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9494 {
9495 IEM_MC_BEGIN(1, 0);
9496 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9497 IEM_MC_ARG(uint64_t, u64Dst, 0);
9498 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9499 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9500 IEM_MC_ADVANCE_RIP();
9501 IEM_MC_END();
9502 }
9503 else
9504 {
9505 IEM_MC_BEGIN(1, 0);
9506 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9507 IEM_MC_ARG(uint32_t, u32Dst, 0);
9508 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9509 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9510 IEM_MC_ADVANCE_RIP();
9511 IEM_MC_END();
9512 }
9513 return VINF_SUCCESS;
9514}
9515
9516
9517/** Opcode 0xf3 0x0f 0xae 11b/2. */
9518FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9519{
9520 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9522 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9523 {
9524 IEM_MC_BEGIN(1, 0);
9525 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9526 IEM_MC_ARG(uint64_t, u64Dst, 0);
9527 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9528 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9529 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9530 IEM_MC_ADVANCE_RIP();
9531 IEM_MC_END();
9532 }
9533 else
9534 {
9535 IEM_MC_BEGIN(1, 0);
9536 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9537 IEM_MC_ARG(uint32_t, u32Dst, 0);
9538 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9539 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9540 IEM_MC_ADVANCE_RIP();
9541 IEM_MC_END();
9542 }
9543 return VINF_SUCCESS;
9544}
9545
9546
9547/** Opcode 0xf3 0x0f 0xae 11b/3. */
9548FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9549{
9550 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9552 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9553 {
9554 IEM_MC_BEGIN(1, 0);
9555 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9556 IEM_MC_ARG(uint64_t, u64Dst, 0);
9557 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9558 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9559 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9560 IEM_MC_ADVANCE_RIP();
9561 IEM_MC_END();
9562 }
9563 else
9564 {
9565 IEM_MC_BEGIN(1, 0);
9566 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9567 IEM_MC_ARG(uint32_t, u32Dst, 0);
9568 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9569 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9570 IEM_MC_ADVANCE_RIP();
9571 IEM_MC_END();
9572 }
9573 return VINF_SUCCESS;
9574}
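/*
 * The four FSGSBASE workers above share one pattern:
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT presumably raises \#UD unless the CPU is
 * in 64-bit mode with CR4.FSGSBASE set, and the WR* variants additionally
 * raise \#GP(0) for a non-canonical 64-bit base address via
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0.
 */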
9575
9576
9577/**
9578 * Group 15 jump table for register variant.
9579 */
9580IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9581{ /* pfx: none, 066h, 0f3h, 0f2h */
9582 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9583 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9584 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9585 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9586 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9587 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9588 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9589 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9590};
9591AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9592
9593
9594/**
9595 * Group 15 jump table for memory variant.
9596 */
9597IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9598{ /* pfx: none, 066h, 0f3h, 0f2h */
9599 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9600 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9601 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9602 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9603 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9604 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9605 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9606 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9607};
9608AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
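/*
 * Both tables are indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix, where
 * idxPrefix encodes the mandatory prefix (0=none, 1=066h, 2=0f3h, 3=0f2h).
 * A few example encodings that land here (a sketch, not exhaustive):
 *      0f ae /5 with a memory operand   -> xrstor
 *      f3 0f ae /0 with a register      -> rdfsbase
 *      0f ae e8 (11b, /5)               -> lfence
 */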
9609
9610
9611/** Opcode 0x0f 0xae. */
9612FNIEMOP_DEF(iemOp_Grp15)
9613{
9614 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful when debugging 286 code. */
9615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9616 if (IEM_IS_MODRM_REG_MODE(bRm))
9617 /* register, register */
9618 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9619 + pVCpu->iem.s.idxPrefix], bRm);
9620 /* memory, register */
9621 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9622 + pVCpu->iem.s.idxPrefix], bRm);
9623}
9624
9625
9626/** Opcode 0x0f 0xaf. */
9627FNIEMOP_DEF(iemOp_imul_Gv_Ev)
9628{
9629 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
9630 IEMOP_HLP_MIN_386();
9631 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9632 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
9633}
9634
9635
9636/** Opcode 0x0f 0xb0. */
9637FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
9638{
9639 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
9640 IEMOP_HLP_MIN_486();
9641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9642
9643 if (IEM_IS_MODRM_REG_MODE(bRm))
9644 {
9645 IEMOP_HLP_DONE_DECODING();
9646 IEM_MC_BEGIN(4, 0);
9647 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9648 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9649 IEM_MC_ARG(uint8_t, u8Src, 2);
9650 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9651
9652 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9653 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9654 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
9655 IEM_MC_REF_EFLAGS(pEFlags);
9656 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9657 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9658 else
9659 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9660
9661 IEM_MC_ADVANCE_RIP();
9662 IEM_MC_END();
9663 }
9664 else
9665 {
9666 IEM_MC_BEGIN(4, 3);
9667 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9668 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9669 IEM_MC_ARG(uint8_t, u8Src, 2);
9670 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9672 IEM_MC_LOCAL(uint8_t, u8Al);
9673
9674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9675 IEMOP_HLP_DONE_DECODING();
9676 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9677 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9678 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
9679 IEM_MC_FETCH_EFLAGS(EFlags);
9680 IEM_MC_REF_LOCAL(pu8Al, u8Al);
9681 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9682 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9683 else
9684 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9685
9686 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9687 IEM_MC_COMMIT_EFLAGS(EFlags);
9688 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
9689 IEM_MC_ADVANCE_RIP();
9690 IEM_MC_END();
9691 }
9692 return VINF_SUCCESS;
9693}
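/*
 * CMPXCHG in pseudo-code (a sketch, not the authoritative definition):
 *      if (*puDst == uAccumulator) { ZF = 1; *puDst = uSrc; }
 *      else                        { ZF = 0; uAccumulator = *puDst; }
 * with the other arithmetic flags set as for CMP.  The memory form above
 * fetches AL into a local, passes it by reference and stores it back
 * unconditionally; on a successful compare the worker is expected to leave it
 * untouched, so the unconditional store is harmless.
 */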
9694
9695/** Opcode 0x0f 0xb1. */
9696FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
9697{
9698 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
9699 IEMOP_HLP_MIN_486();
9700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9701
9702 if (IEM_IS_MODRM_REG_MODE(bRm))
9703 {
9704 IEMOP_HLP_DONE_DECODING();
9705 switch (pVCpu->iem.s.enmEffOpSize)
9706 {
9707 case IEMMODE_16BIT:
9708 IEM_MC_BEGIN(4, 0);
9709 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9710 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9711 IEM_MC_ARG(uint16_t, u16Src, 2);
9712 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9713
9714 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9715 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9716 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
9717 IEM_MC_REF_EFLAGS(pEFlags);
9718 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9719 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9720 else
9721 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9722
9723 IEM_MC_ADVANCE_RIP();
9724 IEM_MC_END();
9725 return VINF_SUCCESS;
9726
9727 case IEMMODE_32BIT:
9728 IEM_MC_BEGIN(4, 0);
9729 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9730 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9731 IEM_MC_ARG(uint32_t, u32Src, 2);
9732 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9733
9734 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9735 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9736 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
9737 IEM_MC_REF_EFLAGS(pEFlags);
9738 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9739 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9740 else
9741 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9742
9743 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
9744 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9745 IEM_MC_ADVANCE_RIP();
9746 IEM_MC_END();
9747 return VINF_SUCCESS;
9748
9749 case IEMMODE_64BIT:
9750 IEM_MC_BEGIN(4, 0);
9751 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9752 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9753#ifdef RT_ARCH_X86
9754 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9755#else
9756 IEM_MC_ARG(uint64_t, u64Src, 2);
9757#endif
9758 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9759
9760 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9761 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
9762 IEM_MC_REF_EFLAGS(pEFlags);
9763#ifdef RT_ARCH_X86
9764 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9765 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9767 else
9768 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9769#else
9770 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9771 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9772 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9773 else
9774 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9775#endif
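 /* On 32-bit hosts (RT_ARCH_X86) the 64-bit source operand is passed by
    reference rather than by value, presumably to suit the assembly helper's
    calling convention there. */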
9776
9777 IEM_MC_ADVANCE_RIP();
9778 IEM_MC_END();
9779 return VINF_SUCCESS;
9780
9781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9782 }
9783 }
9784 else
9785 {
9786 switch (pVCpu->iem.s.enmEffOpSize)
9787 {
9788 case IEMMODE_16BIT:
9789 IEM_MC_BEGIN(4, 3);
9790 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9791 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9792 IEM_MC_ARG(uint16_t, u16Src, 2);
9793 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9795 IEM_MC_LOCAL(uint16_t, u16Ax);
9796
9797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9798 IEMOP_HLP_DONE_DECODING();
9799 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9800 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9801 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
9802 IEM_MC_FETCH_EFLAGS(EFlags);
9803 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
9804 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9805 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9806 else
9807 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9808
9809 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9810 IEM_MC_COMMIT_EFLAGS(EFlags);
9811 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
9812 IEM_MC_ADVANCE_RIP();
9813 IEM_MC_END();
9814 return VINF_SUCCESS;
9815
9816 case IEMMODE_32BIT:
9817 IEM_MC_BEGIN(4, 3);
9818 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9819 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9820 IEM_MC_ARG(uint32_t, u32Src, 2);
9821 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9823 IEM_MC_LOCAL(uint32_t, u32Eax);
9824
9825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9826 IEMOP_HLP_DONE_DECODING();
9827 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9828 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9829 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
9830 IEM_MC_FETCH_EFLAGS(EFlags);
9831 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
9832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9834 else
9835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9836
9837 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9838 IEM_MC_COMMIT_EFLAGS(EFlags);
9839 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
9840 IEM_MC_ADVANCE_RIP();
9841 IEM_MC_END();
9842 return VINF_SUCCESS;
9843
9844 case IEMMODE_64BIT:
9845 IEM_MC_BEGIN(4, 3);
9846 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9847 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9848#ifdef RT_ARCH_X86
9849 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9850#else
9851 IEM_MC_ARG(uint64_t, u64Src, 2);
9852#endif
9853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9855 IEM_MC_LOCAL(uint64_t, u64Rax);
9856
9857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9858 IEMOP_HLP_DONE_DECODING();
9859 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9860 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
9861 IEM_MC_FETCH_EFLAGS(EFlags);
9862 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
9863#ifdef RT_ARCH_X86
9864 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9865 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9866 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9867 else
9868 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9869#else
9870 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9871 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9872 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9873 else
9874 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9875#endif
9876
9877 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9878 IEM_MC_COMMIT_EFLAGS(EFlags);
9879 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
9880 IEM_MC_ADVANCE_RIP();
9881 IEM_MC_END();
9882 return VINF_SUCCESS;
9883
9884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9885 }
9886 }
9887}
9888
9889
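/**
 * Common worker for LSS, LFS and LGS: loads a far pointer from memory into a
 * segment register and a general register.  The memory operand consists of
 * the offset (2, 4 or 8 bytes, by operand size) followed by a 16-bit
 * selector, which is why uSel is fetched at displacement 2, 4 or 8 below.
 */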
9890FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
9891{
9892 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
9893 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
9894
9895 switch (pVCpu->iem.s.enmEffOpSize)
9896 {
9897 case IEMMODE_16BIT:
9898 IEM_MC_BEGIN(5, 1);
9899 IEM_MC_ARG(uint16_t, uSel, 0);
9900 IEM_MC_ARG(uint16_t, offSeg, 1);
9901 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9902 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9903 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9904 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9907 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9908 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
9909 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9910 IEM_MC_END();
9911 return VINF_SUCCESS;
9912
9913 case IEMMODE_32BIT:
9914 IEM_MC_BEGIN(5, 1);
9915 IEM_MC_ARG(uint16_t, uSel, 0);
9916 IEM_MC_ARG(uint32_t, offSeg, 1);
9917 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9918 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9919 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9920 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9923 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9924 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
9925 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9926 IEM_MC_END();
9927 return VINF_SUCCESS;
9928
9929 case IEMMODE_64BIT:
9930 IEM_MC_BEGIN(5, 1);
9931 IEM_MC_ARG(uint16_t, uSel, 0);
9932 IEM_MC_ARG(uint64_t, offSeg, 1);
9933 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9934 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9935 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9936 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9939 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
9940 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9941 else
9942 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9943 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
9944 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9945 IEM_MC_END();
9946 return VINF_SUCCESS;
9947
9948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9949 }
9950}
9951
9952
9953/** Opcode 0x0f 0xb2. */
9954FNIEMOP_DEF(iemOp_lss_Gv_Mp)
9955{
9956 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
9957 IEMOP_HLP_MIN_386();
9958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9959 if (IEM_IS_MODRM_REG_MODE(bRm))
9960 return IEMOP_RAISE_INVALID_OPCODE();
9961 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
9962}
9963
9964
9965/** Opcode 0x0f 0xb3. */
9966FNIEMOP_DEF(iemOp_btr_Ev_Gv)
9967{
9968 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
9969 IEMOP_HLP_MIN_386();
9970 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
9971}
9972
9973
9974/** Opcode 0x0f 0xb4. */
9975FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
9976{
9977 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
9978 IEMOP_HLP_MIN_386();
9979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9980 if (IEM_IS_MODRM_REG_MODE(bRm))
9981 return IEMOP_RAISE_INVALID_OPCODE();
9982 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
9983}
9984
9985
9986/** Opcode 0x0f 0xb5. */
9987FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
9988{
9989 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
9990 IEMOP_HLP_MIN_386();
9991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9992 if (IEM_IS_MODRM_REG_MODE(bRm))
9993 return IEMOP_RAISE_INVALID_OPCODE();
9994 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
9995}
9996
9997
9998/** Opcode 0x0f 0xb6. */
9999FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10000{
10001 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10002 IEMOP_HLP_MIN_386();
10003
10004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10005
10006 /*
10007 * If rm is denoting a register, no more instruction bytes.
10008 */
10009 if (IEM_IS_MODRM_REG_MODE(bRm))
10010 {
10011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10012 switch (pVCpu->iem.s.enmEffOpSize)
10013 {
10014 case IEMMODE_16BIT:
10015 IEM_MC_BEGIN(0, 1);
10016 IEM_MC_LOCAL(uint16_t, u16Value);
10017 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10018 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10019 IEM_MC_ADVANCE_RIP();
10020 IEM_MC_END();
10021 return VINF_SUCCESS;
10022
10023 case IEMMODE_32BIT:
10024 IEM_MC_BEGIN(0, 1);
10025 IEM_MC_LOCAL(uint32_t, u32Value);
10026 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10027 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10028 IEM_MC_ADVANCE_RIP();
10029 IEM_MC_END();
10030 return VINF_SUCCESS;
10031
10032 case IEMMODE_64BIT:
10033 IEM_MC_BEGIN(0, 1);
10034 IEM_MC_LOCAL(uint64_t, u64Value);
10035 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10036 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10037 IEM_MC_ADVANCE_RIP();
10038 IEM_MC_END();
10039 return VINF_SUCCESS;
10040
10041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10042 }
10043 }
10044 else
10045 {
10046 /*
10047 * We're loading a register from memory.
10048 */
10049 switch (pVCpu->iem.s.enmEffOpSize)
10050 {
10051 case IEMMODE_16BIT:
10052 IEM_MC_BEGIN(0, 2);
10053 IEM_MC_LOCAL(uint16_t, u16Value);
10054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10057 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10058 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10059 IEM_MC_ADVANCE_RIP();
10060 IEM_MC_END();
10061 return VINF_SUCCESS;
10062
10063 case IEMMODE_32BIT:
10064 IEM_MC_BEGIN(0, 2);
10065 IEM_MC_LOCAL(uint32_t, u32Value);
10066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10069 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10070 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10071 IEM_MC_ADVANCE_RIP();
10072 IEM_MC_END();
10073 return VINF_SUCCESS;
10074
10075 case IEMMODE_64BIT:
10076 IEM_MC_BEGIN(0, 2);
10077 IEM_MC_LOCAL(uint64_t, u64Value);
10078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10081 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10082 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10083 IEM_MC_ADVANCE_RIP();
10084 IEM_MC_END();
10085 return VINF_SUCCESS;
10086
10087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10088 }
10089 }
10090}
10091
10092
10093/** Opcode 0x0f 0xb7. */
10094FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10095{
10096 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10097 IEMOP_HLP_MIN_386();
10098
10099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10100
10101 /** @todo Not entirely sure how the operand size prefix is handled here,
10102 * assuming that it will be ignored. Would be nice to have a few
10103 * tests for this. */
10104 /*
10105 * If rm is denoting a register, no more instruction bytes.
10106 */
10107 if (IEM_IS_MODRM_REG_MODE(bRm))
10108 {
10109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10110 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10111 {
10112 IEM_MC_BEGIN(0, 1);
10113 IEM_MC_LOCAL(uint32_t, u32Value);
10114 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10115 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10116 IEM_MC_ADVANCE_RIP();
10117 IEM_MC_END();
10118 }
10119 else
10120 {
10121 IEM_MC_BEGIN(0, 1);
10122 IEM_MC_LOCAL(uint64_t, u64Value);
10123 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10124 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10125 IEM_MC_ADVANCE_RIP();
10126 IEM_MC_END();
10127 }
10128 }
10129 else
10130 {
10131 /*
10132 * We're loading a register from memory.
10133 */
10134 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10135 {
10136 IEM_MC_BEGIN(0, 2);
10137 IEM_MC_LOCAL(uint32_t, u32Value);
10138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10141 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10142 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10143 IEM_MC_ADVANCE_RIP();
10144 IEM_MC_END();
10145 }
10146 else
10147 {
10148 IEM_MC_BEGIN(0, 2);
10149 IEM_MC_LOCAL(uint64_t, u64Value);
10150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10153 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10154 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10155 IEM_MC_ADVANCE_RIP();
10156 IEM_MC_END();
10157 }
10158 }
10159 return VINF_SUCCESS;
10160}
10161
10162
10163/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10164FNIEMOP_UD_STUB(iemOp_jmpe);
10165
10166
10167/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10168FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10169{
10170 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10171 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10172 return iemOp_InvalidNeedRM(pVCpu);
10173#ifndef TST_IEM_CHECK_MC
10174# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10175 static const IEMOPBINSIZES s_Native =
10176 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10177# endif
10178 static const IEMOPBINSIZES s_Fallback =
10179 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10180#endif
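 /* IEM_SELECT_HOST_OR_FALLBACK presumably picks s_Native when the host CPU
    has POPCNT and the assembly helpers are built in, and the plain C
    s_Fallback otherwise; under TST_IEM_CHECK_MC the tables are compiled out,
    so the macro presumably expands to something that does not reference
    them there. */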
10181 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10182}
10183
10184
10185/**
10186 * @opcode 0xb9
10187 * @opinvalid intel-modrm
10188 * @optest ->
10189 */
10190FNIEMOP_DEF(iemOp_Grp10)
10191{
10192 /*
10193 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the modr/m
10194 * byte too (see bs3-cpu-decoder-1.c32), so we can forward to iemOp_InvalidNeedRM.
10195 */
10196 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10197 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10198 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10199}
10200
10201
10202/** Opcode 0x0f 0xba. */
10203FNIEMOP_DEF(iemOp_Grp8)
10204{
10205 IEMOP_HLP_MIN_386();
10206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10207 PCIEMOPBINSIZES pImpl;
10208 switch (IEM_GET_MODRM_REG_8(bRm))
10209 {
10210 case 0: case 1: case 2: case 3:
10211 /* Both AMD and Intel want full modr/m decoding and imm8. */
10212 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10213 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10214 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10215 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10216 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10218 }
10219 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10220
10221 if (IEM_IS_MODRM_REG_MODE(bRm))
10222 {
10223 /* register destination. */
10224 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10226
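 /* For these register forms the immediate bit offset wraps modulo the
    operand size, hence the u8Bit & 0x0f/0x1f/0x3f masking in the cases
    below. */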
10227 switch (pVCpu->iem.s.enmEffOpSize)
10228 {
10229 case IEMMODE_16BIT:
10230 IEM_MC_BEGIN(3, 0);
10231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10232 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10234
10235 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10236 IEM_MC_REF_EFLAGS(pEFlags);
10237 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10238
10239 IEM_MC_ADVANCE_RIP();
10240 IEM_MC_END();
10241 return VINF_SUCCESS;
10242
10243 case IEMMODE_32BIT:
10244 IEM_MC_BEGIN(3, 0);
10245 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10246 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10248
10249 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10250 IEM_MC_REF_EFLAGS(pEFlags);
10251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10252
10253 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10254 IEM_MC_ADVANCE_RIP();
10255 IEM_MC_END();
10256 return VINF_SUCCESS;
10257
10258 case IEMMODE_64BIT:
10259 IEM_MC_BEGIN(3, 0);
10260 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10261 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10262 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10263
10264 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10265 IEM_MC_REF_EFLAGS(pEFlags);
10266 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10267
10268 IEM_MC_ADVANCE_RIP();
10269 IEM_MC_END();
10270 return VINF_SUCCESS;
10271
10272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10273 }
10274 }
10275 else
10276 {
10277 /* memory destination. */
10278
10279 uint32_t fAccess;
10280 if (pImpl->pfnLockedU16)
10281 fAccess = IEM_ACCESS_DATA_RW;
10282 else /* BT */
10283 fAccess = IEM_ACCESS_DATA_R;
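 /* BT is the only one of the four without a locked variant since it does not
    write the operand; mapping it read-only also avoids faulting on pages
    that are readable but not writable. */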
10284
10285 /** @todo test negative bit offsets! */
10286 switch (pVCpu->iem.s.enmEffOpSize)
10287 {
10288 case IEMMODE_16BIT:
10289 IEM_MC_BEGIN(3, 1);
10290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10291 IEM_MC_ARG(uint16_t, u16Src, 1);
10292 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10294
10295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10296 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10297 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10298 if (pImpl->pfnLockedU16)
10299 IEMOP_HLP_DONE_DECODING();
10300 else
10301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10302 IEM_MC_FETCH_EFLAGS(EFlags);
10303 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10304 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10306 else
10307 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10308 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10309
10310 IEM_MC_COMMIT_EFLAGS(EFlags);
10311 IEM_MC_ADVANCE_RIP();
10312 IEM_MC_END();
10313 return VINF_SUCCESS;
10314
10315 case IEMMODE_32BIT:
10316 IEM_MC_BEGIN(3, 1);
10317 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10318 IEM_MC_ARG(uint32_t, u32Src, 1);
10319 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10321
10322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10323 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10324 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10325 if (pImpl->pfnLockedU16)
10326 IEMOP_HLP_DONE_DECODING();
10327 else
10328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10329 IEM_MC_FETCH_EFLAGS(EFlags);
10330 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10332 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10333 else
10334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10335 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10336
10337 IEM_MC_COMMIT_EFLAGS(EFlags);
10338 IEM_MC_ADVANCE_RIP();
10339 IEM_MC_END();
10340 return VINF_SUCCESS;
10341
10342 case IEMMODE_64BIT:
10343 IEM_MC_BEGIN(3, 1);
10344 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10345 IEM_MC_ARG(uint64_t, u64Src, 1);
10346 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10348
10349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10350 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10351 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10352 if (pImpl->pfnLockedU16)
10353 IEMOP_HLP_DONE_DECODING();
10354 else
10355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10356 IEM_MC_FETCH_EFLAGS(EFlags);
10357 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10358 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10359 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10360 else
10361 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10362 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10363
10364 IEM_MC_COMMIT_EFLAGS(EFlags);
10365 IEM_MC_ADVANCE_RIP();
10366 IEM_MC_END();
10367 return VINF_SUCCESS;
10368
10369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10370 }
10371 }
10372}
10373
10374
10375/** Opcode 0x0f 0xbb. */
10376FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10377{
10378 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10379 IEMOP_HLP_MIN_386();
10380 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10381}
10382
10383
10384/**
10385 * Common worker for BSF and BSR instructions.
10386 *
10387 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10388 * the destination register: when the source is zero (ZF=1), the destination is
10389 * left unmodified, so the high bits of a 32-bit destination must be left alone too.
10390 *
10391 * @param pImpl Pointer to the instruction implementation (assembly).
10392 */
10393FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10394{
10395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10396
10397 /*
10398 * If rm is denoting a register, no more instruction bytes.
10399 */
10400 if (IEM_IS_MODRM_REG_MODE(bRm))
10401 {
10402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10403 switch (pVCpu->iem.s.enmEffOpSize)
10404 {
10405 case IEMMODE_16BIT:
10406 IEM_MC_BEGIN(3, 0);
10407 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10408 IEM_MC_ARG(uint16_t, u16Src, 1);
10409 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10410
10411 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10412 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10413 IEM_MC_REF_EFLAGS(pEFlags);
10414 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10415
10416 IEM_MC_ADVANCE_RIP();
10417 IEM_MC_END();
10418 break;
10419
10420 case IEMMODE_32BIT:
10421 IEM_MC_BEGIN(3, 0);
10422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10423 IEM_MC_ARG(uint32_t, u32Src, 1);
10424 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10425
10426 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10427 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10428 IEM_MC_REF_EFLAGS(pEFlags);
10429 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10430 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10431 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10432 IEM_MC_ENDIF();
10433 IEM_MC_ADVANCE_RIP();
10434 IEM_MC_END();
10435 break;
10436
10437 case IEMMODE_64BIT:
10438 IEM_MC_BEGIN(3, 0);
10439 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10440 IEM_MC_ARG(uint64_t, u64Src, 1);
10441 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10442
10443 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10444 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10445 IEM_MC_REF_EFLAGS(pEFlags);
10446 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10447
10448 IEM_MC_ADVANCE_RIP();
10449 IEM_MC_END();
10450 break;
10451 }
10452 }
10453 else
10454 {
10455 /*
10456 * We're accessing memory.
10457 */
10458 switch (pVCpu->iem.s.enmEffOpSize)
10459 {
10460 case IEMMODE_16BIT:
10461 IEM_MC_BEGIN(3, 1);
10462 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10463 IEM_MC_ARG(uint16_t, u16Src, 1);
10464 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10466
10467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10469 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10470 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10471 IEM_MC_REF_EFLAGS(pEFlags);
10472 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10473
10474 IEM_MC_ADVANCE_RIP();
10475 IEM_MC_END();
10476 break;
10477
10478 case IEMMODE_32BIT:
10479 IEM_MC_BEGIN(3, 1);
10480 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10481 IEM_MC_ARG(uint32_t, u32Src, 1);
10482 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10484
10485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10487 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10488 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10489 IEM_MC_REF_EFLAGS(pEFlags);
10490 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10491
10492 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10493 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10494 IEM_MC_ENDIF();
10495 IEM_MC_ADVANCE_RIP();
10496 IEM_MC_END();
10497 break;
10498
10499 case IEMMODE_64BIT:
10500 IEM_MC_BEGIN(3, 1);
10501 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10502 IEM_MC_ARG(uint64_t, u64Src, 1);
10503 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10505
10506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10508 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10509 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10510 IEM_MC_REF_EFLAGS(pEFlags);
10511 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10512
10513 IEM_MC_ADVANCE_RIP();
10514 IEM_MC_END();
10515 break;
10516 }
10517 }
10518 return VINF_SUCCESS;
10519}
10520
10521
10522/** Opcode 0x0f 0xbc. */
10523FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10524{
10525 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10526 IEMOP_HLP_MIN_386();
10527 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10528 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10529}
10530
10531
10532/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10533FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10534{
10535 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10536 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10537 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10538
10539#ifndef TST_IEM_CHECK_MC
10540 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10541 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10542 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10543 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10544 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10545 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10546 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10547 {
10548 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10549 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10550 };
10551#endif
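 /* The 2x4 table above appears to be indexed first by whether the host has
    BMI1 and then by the guest EFLAGS-behaviour selection; when the host
    lacks BMI1, even the "native" slots fall back to the exact C
    implementations.  The LZCNT worker below uses the same scheme. */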
10552 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10553 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10554 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10555}
10556
10557
10558/** Opcode 0x0f 0xbd. */
10559FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10560{
10561 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10562 IEMOP_HLP_MIN_386();
10563 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10564 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
10565}
10566
10567
10568/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
10569FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
10570{
10571 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10572 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
10573 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10574
10575#ifndef TST_IEM_CHECK_MC
10576 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
10577 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
10578 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
10579 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
10580 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
10581 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
10582 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
10583 {
10584 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
10585 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
10586 };
10587#endif
10588 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10589 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10590 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10591}
10592
10593
10594
10595/** Opcode 0x0f 0xbe. */
10596FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
10597{
10598 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
10599 IEMOP_HLP_MIN_386();
10600
10601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10602
10603 /*
10604 * If rm is denoting a register, no more instruction bytes.
10605 */
10606 if (IEM_IS_MODRM_REG_MODE(bRm))
10607 {
10608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10609 switch (pVCpu->iem.s.enmEffOpSize)
10610 {
10611 case IEMMODE_16BIT:
10612 IEM_MC_BEGIN(0, 1);
10613 IEM_MC_LOCAL(uint16_t, u16Value);
10614 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10615 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10616 IEM_MC_ADVANCE_RIP();
10617 IEM_MC_END();
10618 return VINF_SUCCESS;
10619
10620 case IEMMODE_32BIT:
10621 IEM_MC_BEGIN(0, 1);
10622 IEM_MC_LOCAL(uint32_t, u32Value);
10623 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10624 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10625 IEM_MC_ADVANCE_RIP();
10626 IEM_MC_END();
10627 return VINF_SUCCESS;
10628
10629 case IEMMODE_64BIT:
10630 IEM_MC_BEGIN(0, 1);
10631 IEM_MC_LOCAL(uint64_t, u64Value);
10632 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10633 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10634 IEM_MC_ADVANCE_RIP();
10635 IEM_MC_END();
10636 return VINF_SUCCESS;
10637
10638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10639 }
10640 }
10641 else
10642 {
10643 /*
10644 * We're loading a register from memory.
10645 */
10646 switch (pVCpu->iem.s.enmEffOpSize)
10647 {
10648 case IEMMODE_16BIT:
10649 IEM_MC_BEGIN(0, 2);
10650 IEM_MC_LOCAL(uint16_t, u16Value);
10651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10654 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10655 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10656 IEM_MC_ADVANCE_RIP();
10657 IEM_MC_END();
10658 return VINF_SUCCESS;
10659
10660 case IEMMODE_32BIT:
10661 IEM_MC_BEGIN(0, 2);
10662 IEM_MC_LOCAL(uint32_t, u32Value);
10663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10666 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10667 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10668 IEM_MC_ADVANCE_RIP();
10669 IEM_MC_END();
10670 return VINF_SUCCESS;
10671
10672 case IEMMODE_64BIT:
10673 IEM_MC_BEGIN(0, 2);
10674 IEM_MC_LOCAL(uint64_t, u64Value);
10675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10678 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10679 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10680 IEM_MC_ADVANCE_RIP();
10681 IEM_MC_END();
10682 return VINF_SUCCESS;
10683
10684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10685 }
10686 }
10687}
10688
10689
10690/** Opcode 0x0f 0xbf. */
10691FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
10692{
10693 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
10694 IEMOP_HLP_MIN_386();
10695
10696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10697
10698 /** @todo Not entirely sure how the operand size prefix is handled here,
10699 * assuming that it will be ignored. Would be nice to have a few
10700 * tests for this. */
10701 /*
10702 * If rm is denoting a register, no more instruction bytes.
10703 */
10704 if (IEM_IS_MODRM_REG_MODE(bRm))
10705 {
10706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10707 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10708 {
10709 IEM_MC_BEGIN(0, 1);
10710 IEM_MC_LOCAL(uint32_t, u32Value);
10711 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10712 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10713 IEM_MC_ADVANCE_RIP();
10714 IEM_MC_END();
10715 }
10716 else
10717 {
10718 IEM_MC_BEGIN(0, 1);
10719 IEM_MC_LOCAL(uint64_t, u64Value);
10720 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10721 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10722 IEM_MC_ADVANCE_RIP();
10723 IEM_MC_END();
10724 }
10725 }
10726 else
10727 {
10728 /*
10729 * We're loading a register from memory.
10730 */
10731 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10732 {
10733 IEM_MC_BEGIN(0, 2);
10734 IEM_MC_LOCAL(uint32_t, u32Value);
10735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10738 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10739 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10740 IEM_MC_ADVANCE_RIP();
10741 IEM_MC_END();
10742 }
10743 else
10744 {
10745 IEM_MC_BEGIN(0, 2);
10746 IEM_MC_LOCAL(uint64_t, u64Value);
10747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10750 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10751 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10752 IEM_MC_ADVANCE_RIP();
10753 IEM_MC_END();
10754 }
10755 }
10756 return VINF_SUCCESS;
10757}
10758
10759
10760/** Opcode 0x0f 0xc0. */
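/* A rough sketch of the operation, with illustrative names (EFLAGS are set
   as for ADD):
       uTmp = uDst; uDst = uDst + uReg; uReg = uTmp; */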
10761FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
10762{
10763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10764 IEMOP_HLP_MIN_486();
10765 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
10766
10767 /*
10768 * If rm is denoting a register, no more instruction bytes.
10769 */
10770 if (IEM_IS_MODRM_REG_MODE(bRm))
10771 {
10772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10773
10774 IEM_MC_BEGIN(3, 0);
10775 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10776 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10777 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10778
10779 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10780 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10781 IEM_MC_REF_EFLAGS(pEFlags);
10782 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10783
10784 IEM_MC_ADVANCE_RIP();
10785 IEM_MC_END();
10786 }
10787 else
10788 {
10789 /*
10790 * We're accessing memory.
10791 */
10792 IEM_MC_BEGIN(3, 3);
10793 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10794 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10795 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10796 IEM_MC_LOCAL(uint8_t, u8RegCopy);
10797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10798
10799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10800 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10801 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10802 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
10803 IEM_MC_FETCH_EFLAGS(EFlags);
10804 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10805 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10806 else
10807 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
10808
10809 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10810 IEM_MC_COMMIT_EFLAGS(EFlags);
10811 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
10812 IEM_MC_ADVANCE_RIP();
10813 IEM_MC_END();
10814 return VINF_SUCCESS;
10815 }
10816 return VINF_SUCCESS;
10817}
10818
10819
10820/** Opcode 0x0f 0xc1. */
10821FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
10822{
10823 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
10824 IEMOP_HLP_MIN_486();
10825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10826
10827 /*
10828 * If rm is denoting a register, no more instruction bytes.
10829 */
10830 if (IEM_IS_MODRM_REG_MODE(bRm))
10831 {
10832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10833
10834 switch (pVCpu->iem.s.enmEffOpSize)
10835 {
10836 case IEMMODE_16BIT:
10837 IEM_MC_BEGIN(3, 0);
10838 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10839 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10840 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10841
10842 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10843 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10844 IEM_MC_REF_EFLAGS(pEFlags);
10845 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10846
10847 IEM_MC_ADVANCE_RIP();
10848 IEM_MC_END();
10849 return VINF_SUCCESS;
10850
10851 case IEMMODE_32BIT:
10852 IEM_MC_BEGIN(3, 0);
10853 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10854 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10856
10857 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10858 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10859 IEM_MC_REF_EFLAGS(pEFlags);
10860 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10861
10862 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10863 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10864 IEM_MC_ADVANCE_RIP();
10865 IEM_MC_END();
10866 return VINF_SUCCESS;
10867
10868 case IEMMODE_64BIT:
10869 IEM_MC_BEGIN(3, 0);
10870 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10871 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10872 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10873
10874 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10875 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10876 IEM_MC_REF_EFLAGS(pEFlags);
10877 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10878
10879 IEM_MC_ADVANCE_RIP();
10880 IEM_MC_END();
10881 return VINF_SUCCESS;
10882
10883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10884 }
10885 }
10886 else
10887 {
10888 /*
10889 * We're accessing memory.
10890 */
10891 switch (pVCpu->iem.s.enmEffOpSize)
10892 {
10893 case IEMMODE_16BIT:
10894 IEM_MC_BEGIN(3, 3);
10895 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10896 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10897 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10898 IEM_MC_LOCAL(uint16_t, u16RegCopy);
10899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10900
10901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10902 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10903 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10904 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
10905 IEM_MC_FETCH_EFLAGS(EFlags);
10906 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10907 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10908 else
10909 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
10910
10911 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10912 IEM_MC_COMMIT_EFLAGS(EFlags);
10913 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
10914 IEM_MC_ADVANCE_RIP();
10915 IEM_MC_END();
10916 return VINF_SUCCESS;
10917
10918 case IEMMODE_32BIT:
10919 IEM_MC_BEGIN(3, 3);
10920 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10921 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10922 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10923 IEM_MC_LOCAL(uint32_t, u32RegCopy);
10924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10925
10926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10927 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10928 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10929 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
10930 IEM_MC_FETCH_EFLAGS(EFlags);
10931 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10932 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10933 else
10934 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
10935
10936 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10937 IEM_MC_COMMIT_EFLAGS(EFlags);
10938 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
10939 IEM_MC_ADVANCE_RIP();
10940 IEM_MC_END();
10941 return VINF_SUCCESS;
10942
10943 case IEMMODE_64BIT:
10944 IEM_MC_BEGIN(3, 3);
10945 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10946 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10947 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10948 IEM_MC_LOCAL(uint64_t, u64RegCopy);
10949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10950
10951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10952 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10953 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10954 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
10955 IEM_MC_FETCH_EFLAGS(EFlags);
10956 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10957 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10958 else
10959 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
10960
10961 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10962 IEM_MC_COMMIT_EFLAGS(EFlags);
10963 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
10964 IEM_MC_ADVANCE_RIP();
10965 IEM_MC_END();
10966 return VINF_SUCCESS;
10967
10968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10969 }
10970 }
10971}
10972
10973
10974/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
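/* For reference: the imm8 selects the compare predicate applied to all four
   single-precision lanes; SSE defines only the low three bits:
       0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD
   A lane is set to all ones when the predicate holds and all zeros otherwise. */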
10975FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
10976{
10977 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10978
10979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10980 if (IEM_IS_MODRM_REG_MODE(bRm))
10981 {
10982 /*
10983 * Register, register.
10984 */
10985 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10987 IEM_MC_BEGIN(4, 2);
10988 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
10989 IEM_MC_LOCAL(X86XMMREG, Dst);
10990 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
10991 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
10992 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
10993 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
10994 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
10995 IEM_MC_PREPARE_SSE_USAGE();
10996 IEM_MC_REF_MXCSR(pfMxcsr);
10997 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
10998 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
10999 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11000 IEM_MC_IF_MXCSR_XCPT_PENDING()
11001 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11002 IEM_MC_ELSE()
11003 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11004 IEM_MC_ENDIF();
11005
11006 IEM_MC_ADVANCE_RIP();
11007 IEM_MC_END();
11008 }
11009 else
11010 {
11011 /*
11012 * Register, memory.
11013 */
11014 IEM_MC_BEGIN(4, 3);
11015 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11016 IEM_MC_LOCAL(X86XMMREG, Dst);
11017 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11018 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11019 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11021
11022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11023 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11024 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11027 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11028
11029 IEM_MC_PREPARE_SSE_USAGE();
11030 IEM_MC_REF_MXCSR(pfMxcsr);
11031 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11032 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11033 IEM_MC_IF_MXCSR_XCPT_PENDING()
11034 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11035 IEM_MC_ELSE()
11036 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11037 IEM_MC_ENDIF();
11038
11039 IEM_MC_ADVANCE_RIP();
11040 IEM_MC_END();
11041 }
11042 return VINF_SUCCESS;
11043}
11044
11045
11046/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11047FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11048{
11049 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11050
11051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11052 if (IEM_IS_MODRM_REG_MODE(bRm))
11053 {
11054 /*
11055 * Register, register.
11056 */
11057 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11059 IEM_MC_BEGIN(4, 2);
11060 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11061 IEM_MC_LOCAL(X86XMMREG, Dst);
11062 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11063 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11064 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11065 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11067 IEM_MC_PREPARE_SSE_USAGE();
11068 IEM_MC_REF_MXCSR(pfMxcsr);
11069 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11070 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11071 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11072 IEM_MC_IF_MXCSR_XCPT_PENDING()
11073 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11074 IEM_MC_ELSE()
11075 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11076 IEM_MC_ENDIF();
11077
11078 IEM_MC_ADVANCE_RIP();
11079 IEM_MC_END();
11080 }
11081 else
11082 {
11083 /*
11084 * Register, memory.
11085 */
11086 IEM_MC_BEGIN(4, 3);
11087 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11088 IEM_MC_LOCAL(X86XMMREG, Dst);
11089 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11090 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11091 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11093
11094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11095 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11096 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11099 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11100
11101 IEM_MC_PREPARE_SSE_USAGE();
11102 IEM_MC_REF_MXCSR(pfMxcsr);
11103 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11104 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11105 IEM_MC_IF_MXCSR_XCPT_PENDING()
11106 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11107 IEM_MC_ELSE()
11108 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11109 IEM_MC_ENDIF();
11110
11111 IEM_MC_ADVANCE_RIP();
11112 IEM_MC_END();
11113 }
11114 return VINF_SUCCESS;
11115}
11116
11117
11118/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11119FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11120{
11121 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11122
11123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11124 if (IEM_IS_MODRM_REG_MODE(bRm))
11125 {
11126 /*
11127 * Register, register.
11128 */
11129 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11131 IEM_MC_BEGIN(4, 2);
11132 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11133 IEM_MC_LOCAL(X86XMMREG, Dst);
11134 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11135 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11136 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11137 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11139 IEM_MC_PREPARE_SSE_USAGE();
11140 IEM_MC_REF_MXCSR(pfMxcsr);
11141 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11142 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11143 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11144 IEM_MC_IF_MXCSR_XCPT_PENDING()
11145 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11146 IEM_MC_ELSE()
11147 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11148 IEM_MC_ENDIF();
11149
11150 IEM_MC_ADVANCE_RIP();
11151 IEM_MC_END();
11152 }
11153 else
11154 {
11155 /*
11156 * Register, memory.
11157 */
11158 IEM_MC_BEGIN(4, 3);
11159 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11160 IEM_MC_LOCAL(X86XMMREG, Dst);
11161 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11162 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11163 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11165
11166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11167 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11168 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11171 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11172
11173 IEM_MC_PREPARE_SSE_USAGE();
11174 IEM_MC_REF_MXCSR(pfMxcsr);
11175 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11176 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11177 IEM_MC_IF_MXCSR_XCPT_PENDING()
11178 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11179 IEM_MC_ELSE()
11180 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11181 IEM_MC_ENDIF();
11182
11183 IEM_MC_ADVANCE_RIP();
11184 IEM_MC_END();
11185 }
11186 return VINF_SUCCESS;
11187}
11188
11189
11190/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
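/* Scalar form: only the low double-precision lane participates; bits 127:64
   of the destination register are preserved. */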
11191FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11192{
11193 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11194
11195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11196 if (IEM_IS_MODRM_REG_MODE(bRm))
11197 {
11198 /*
11199 * Register, register.
11200 */
11201 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11203 IEM_MC_BEGIN(4, 2);
11204 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11205 IEM_MC_LOCAL(X86XMMREG, Dst);
11206 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11207 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11208 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11209 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11210 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11211 IEM_MC_PREPARE_SSE_USAGE();
11212 IEM_MC_REF_MXCSR(pfMxcsr);
11213 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11214 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11215 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11216 IEM_MC_IF_MXCSR_XCPT_PENDING()
11217 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11218 IEM_MC_ELSE()
11219 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11220 IEM_MC_ENDIF();
11221
11222 IEM_MC_ADVANCE_RIP();
11223 IEM_MC_END();
11224 }
11225 else
11226 {
11227 /*
11228 * Register, memory.
11229 */
11230 IEM_MC_BEGIN(4, 3);
11231 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11232 IEM_MC_LOCAL(X86XMMREG, Dst);
11233 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11234 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11235 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11237
11238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11239 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11240 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11242 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11243 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11244
11245 IEM_MC_PREPARE_SSE_USAGE();
11246 IEM_MC_REF_MXCSR(pfMxcsr);
11247 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11248 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11249 IEM_MC_IF_MXCSR_XCPT_PENDING()
11250 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11251 IEM_MC_ELSE()
11252 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11253 IEM_MC_ENDIF();
11254
11255 IEM_MC_ADVANCE_RIP();
11256 IEM_MC_END();
11257 }
11258 return VINF_SUCCESS;
11259}
11260
11261
11262/** Opcode 0x0f 0xc3. */
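/* MOVNTI is a non-temporal store hint; architecturally it behaves like a
   plain store, which is how it is emulated below (the hint does not affect
   correctness). */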
11263FNIEMOP_DEF(iemOp_movnti_My_Gy)
11264{
11265 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11266
11267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11268
11269 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11270 if (IEM_IS_MODRM_MEM_MODE(bRm))
11271 {
11272 switch (pVCpu->iem.s.enmEffOpSize)
11273 {
11274 case IEMMODE_32BIT:
11275 IEM_MC_BEGIN(0, 2);
11276 IEM_MC_LOCAL(uint32_t, u32Value);
11277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11278
11279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11281 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11282 return IEMOP_RAISE_INVALID_OPCODE();
11283
11284 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11285 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11286 IEM_MC_ADVANCE_RIP();
11287 IEM_MC_END();
11288 break;
11289
11290 case IEMMODE_64BIT:
11291 IEM_MC_BEGIN(0, 2);
11292 IEM_MC_LOCAL(uint64_t, u64Value);
11293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11294
11295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11297 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11298 return IEMOP_RAISE_INVALID_OPCODE();
11299
11300 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11301 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11302 IEM_MC_ADVANCE_RIP();
11303 IEM_MC_END();
11304 break;
11305
11306 case IEMMODE_16BIT:
11307 /** @todo check this form. */
11308 return IEMOP_RAISE_INVALID_OPCODE();
11309 }
11310 }
11311 else
11312 return IEMOP_RAISE_INVALID_OPCODE();
11313 return VINF_SUCCESS;
11314}
11315
11316
11317/* Opcode 0x66 0x0f 0xc3 - invalid */
11318/* Opcode 0xf3 0x0f 0xc3 - invalid */
11319/* Opcode 0xf2 0x0f 0xc3 - invalid */
11320
11321
11322/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
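/* A rough sketch with illustrative names: the word is written to the element
   selected by the immediate (imm8 & 3 for this 64-bit form, imm8 & 7 for the
   128-bit form):
       ((uint16_t *)puDst)[bImm & 3] = u16Src; */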
11323FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11324{
11325 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11327 if (IEM_IS_MODRM_REG_MODE(bRm))
11328 {
11329 /*
11330 * Register, register.
11331 */
11332 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11334 IEM_MC_BEGIN(3, 0);
11335 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11336 IEM_MC_ARG(uint16_t, u16Src, 1);
11337 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11338 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11339 IEM_MC_PREPARE_FPU_USAGE();
11340 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11341 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11342 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11343 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11344 IEM_MC_FPU_TO_MMX_MODE();
11345 IEM_MC_ADVANCE_RIP();
11346 IEM_MC_END();
11347 }
11348 else
11349 {
11350 /*
11351 * Register, memory.
11352 */
11353 IEM_MC_BEGIN(3, 2);
11354 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11355 IEM_MC_ARG(uint16_t, u16Src, 1);
11356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11357
11358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11359 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11360 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11362 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11363 IEM_MC_PREPARE_FPU_USAGE();
11364
11365 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11366 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11367 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11368 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11369 IEM_MC_FPU_TO_MMX_MODE();
11370 IEM_MC_ADVANCE_RIP();
11371 IEM_MC_END();
11372 }
11373 return VINF_SUCCESS;
11374}
11375
11376
11377/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11378FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11379{
11380 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11382 if (IEM_IS_MODRM_REG_MODE(bRm))
11383 {
11384 /*
11385 * Register, register.
11386 */
11387 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11389 IEM_MC_BEGIN(3, 0);
11390 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11391 IEM_MC_ARG(uint16_t, u16Src, 1);
11392 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11393 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11394 IEM_MC_PREPARE_SSE_USAGE();
11395 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11396 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11397 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11398 IEM_MC_ADVANCE_RIP();
11399 IEM_MC_END();
11400 }
11401 else
11402 {
11403 /*
11404 * Register, memory.
11405 */
11406 IEM_MC_BEGIN(3, 2);
11407 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11408 IEM_MC_ARG(uint16_t, u16Src, 1);
11409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11410
11411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11412 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11413 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11415 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11416 IEM_MC_PREPARE_SSE_USAGE();
11417
11418 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11419 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11420 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11421 IEM_MC_ADVANCE_RIP();
11422 IEM_MC_END();
11423 }
11424 return VINF_SUCCESS;
11425}
11426
11427
11428/* Opcode 0xf3 0x0f 0xc4 - invalid */
11429/* Opcode 0xf2 0x0f 0xc4 - invalid */
11430
11431
11432/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
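/* A rough sketch with illustrative names: the word selected by the immediate
   (imm8 & 3 here, imm8 & 7 for the 128-bit form) is zero-extended into the
   32-bit general register:
       uDst = ((uint16_t const *)puSrc)[bImm & 3]; */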
11433FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11434{
11435 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11437 if (IEM_IS_MODRM_REG_MODE(bRm))
11438 {
11439 /*
11440 * Register, register.
11441 */
11442 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11444 IEM_MC_BEGIN(3, 1);
11445 IEM_MC_LOCAL(uint16_t, u16Dst);
11446 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11447 IEM_MC_ARG(uint64_t, u64Src, 1);
11448 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11449 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11450 IEM_MC_PREPARE_FPU_USAGE();
11451 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11452 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11453 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11454 IEM_MC_FPU_TO_MMX_MODE();
11455 IEM_MC_ADVANCE_RIP();
11456 IEM_MC_END();
11457 return VINF_SUCCESS;
11458 }
11459
11460 /* No memory operand. */
11461 return IEMOP_RAISE_INVALID_OPCODE();
11462}
11463
11464
11465/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11466FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11467{
11468 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11470 if (IEM_IS_MODRM_REG_MODE(bRm))
11471 {
11472 /*
11473 * Register, register.
11474 */
11475 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11477 IEM_MC_BEGIN(3, 1);
11478 IEM_MC_LOCAL(uint16_t, u16Dst);
11479 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11480 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11481 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11482 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11483 IEM_MC_PREPARE_SSE_USAGE();
11484 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11485 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11486 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11487 IEM_MC_ADVANCE_RIP();
11488 IEM_MC_END();
11489 return VINF_SUCCESS;
11490 }
11491
11492 /* No memory operand. */
11493 return IEMOP_RAISE_INVALID_OPCODE();
11494}
11495
11496
11497/* Opcode 0xf3 0x0f 0xc5 - invalid */
11498/* Opcode 0xf2 0x0f 0xc5 - invalid */
11499
11500
11501/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
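/* A rough sketch of the shuffle; the destination doubles as the first source
   and all selections are made from the original register values:
       Dst[0] = Dst[bImm & 3];        Dst[1] = Dst[(bImm >> 2) & 3];
       Dst[2] = Src[(bImm >> 4) & 3]; Dst[3] = Src[(bImm >> 6) & 3]; */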
11502FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11503{
11504 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11506 if (IEM_IS_MODRM_REG_MODE(bRm))
11507 {
11508 /*
11509 * Register, register.
11510 */
11511 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11513 IEM_MC_BEGIN(3, 0);
11514 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11515 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11516 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11518 IEM_MC_PREPARE_SSE_USAGE();
11519 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11520 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11521 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11522 IEM_MC_ADVANCE_RIP();
11523 IEM_MC_END();
11524 }
11525 else
11526 {
11527 /*
11528 * Register, memory.
11529 */
11530 IEM_MC_BEGIN(3, 2);
11531 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11532 IEM_MC_LOCAL(RTUINT128U, uSrc);
11533 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11535
11536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11537 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11538 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11540 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11541 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11542
11543 IEM_MC_PREPARE_SSE_USAGE();
11544 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11545 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11546
11547 IEM_MC_ADVANCE_RIP();
11548 IEM_MC_END();
11549 }
11550 return VINF_SUCCESS;
11551}
11552
11553
11554/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
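/* A rough sketch; selections are made from the original register values:
       Dst[0] = Dst[bImm & 1]; Dst[1] = Src[(bImm >> 1) & 1]; */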
11555FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11556{
11557 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11559 if (IEM_IS_MODRM_REG_MODE(bRm))
11560 {
11561 /*
11562 * Register, register.
11563 */
11564 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11566 IEM_MC_BEGIN(3, 0);
11567 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11568 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11569 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11571 IEM_MC_PREPARE_SSE_USAGE();
11572 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11573 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11575 IEM_MC_ADVANCE_RIP();
11576 IEM_MC_END();
11577 }
11578 else
11579 {
11580 /*
11581 * Register, memory.
11582 */
11583 IEM_MC_BEGIN(3, 2);
11584 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11585 IEM_MC_LOCAL(RTUINT128U, uSrc);
11586 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11588
11589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11590 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11591 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11593 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11594 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11595
11596 IEM_MC_PREPARE_SSE_USAGE();
11597 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11598 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11599
11600 IEM_MC_ADVANCE_RIP();
11601 IEM_MC_END();
11602 }
11603 return VINF_SUCCESS;
11604}
11605
11606
11607/* Opcode 0xf3 0x0f 0xc6 - invalid */
11608/* Opcode 0xf2 0x0f 0xc6 - invalid */
11609
11610
11611/** Opcode 0x0f 0xc7 !11/1. */
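/* A rough sketch of the operation:
       if (u64Mem == EDX:EAX) { ZF = 1; u64Mem = ECX:EBX; }
       else                   { ZF = 0; EDX:EAX = u64Mem; } */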
11612FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
11613{
11614 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
11615
11616 IEM_MC_BEGIN(4, 3);
11617 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
11618 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
11619 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
11620 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11621 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
11622 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
11623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11624
11625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11626 IEMOP_HLP_DONE_DECODING();
11627 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11628
11629 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
11630 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
11631 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
11632
11633 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
11634 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
11635 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
11636
11637 IEM_MC_FETCH_EFLAGS(EFlags);
11638 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11639 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11640 else
11641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11642
11643 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
11644 IEM_MC_COMMIT_EFLAGS(EFlags);
11645 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11646 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
11647 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
11648 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
11649 IEM_MC_ENDIF();
11650 IEM_MC_ADVANCE_RIP();
11651
11652 IEM_MC_END();
11653 return VINF_SUCCESS;
11654}
11655
11656
11657/** Opcode REX.W 0x0f 0xc7 !11/1. */
11658FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
11659{
11660 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
11661 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11662 {
11663#if 0
11664 RT_NOREF(bRm);
11665 IEMOP_BITCH_ABOUT_STUB();
11666 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
11667#else
11668 IEM_MC_BEGIN(4, 3);
11669 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
11670 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
11671 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
11672 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11673 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
11674 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
11675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11676
11677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11678 IEMOP_HLP_DONE_DECODING();
11679 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
11680 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11681
11682 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
11683 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
11684 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
11685
11686 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
11687 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
11688 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
11689
11690 IEM_MC_FETCH_EFLAGS(EFlags);
11691# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
11692# if defined(RT_ARCH_AMD64)
11693 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11694# endif
11695 {
11696 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11697 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11698 else
11699 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11700 }
11701# if defined(RT_ARCH_AMD64)
11702 else
11703# endif
11704# endif
11705# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
11706 {
11707 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
11708 accesses that are not at all atomic, which works fine in a uni-CPU guest
11709 configuration (ignoring DMA). If guest SMP is active we have no choice
11710 but to use a rendezvous callback here. Sigh. */
11711 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
11712 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11713 else
11714 {
11715 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11716 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
11717 }
11718 }
11719# endif
11720
11721 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
11722 IEM_MC_COMMIT_EFLAGS(EFlags);
11723 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11724 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
11725 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
11726 IEM_MC_ENDIF();
11727 IEM_MC_ADVANCE_RIP();
11728
11729 IEM_MC_END();
11730 return VINF_SUCCESS;
11731#endif
11732 }
11733 Log(("cmpxchg16b -> #UD\n"));
11734 return IEMOP_RAISE_INVALID_OPCODE();
11735}
11736
11737FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
11738{
11739 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
11740 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
11741 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
11742}
11743
11744/** Opcode 0x0f 0xc7 11/6. */
11745FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
11746
11747/** Opcode 0x0f 0xc7 !11/6. */
11748#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11749FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
11750{
11751 IEMOP_MNEMONIC(vmptrld, "vmptrld");
11752 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
11753 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
11754 IEM_MC_BEGIN(2, 0);
11755 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11756 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11758 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11759 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11760 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
11761 IEM_MC_END();
11762 return VINF_SUCCESS;
11763}
11764#else
11765FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
11766#endif
11767
11768/** Opcode 0x66 0x0f 0xc7 !11/6. */
11769#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11770FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
11771{
11772 IEMOP_MNEMONIC(vmclear, "vmclear");
11773 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
11774 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
11775 IEM_MC_BEGIN(2, 0);
11776 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11777 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11779 IEMOP_HLP_DONE_DECODING();
11780 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11781 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
11782 IEM_MC_END();
11783 return VINF_SUCCESS;
11784}
11785#else
11786FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
11787#endif
11788
11789/** Opcode 0xf3 0x0f 0xc7 !11/6. */
11790#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11791FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
11792{
11793 IEMOP_MNEMONIC(vmxon, "vmxon");
11794 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
11795 IEM_MC_BEGIN(2, 0);
11796 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11797 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11799 IEMOP_HLP_DONE_DECODING();
11800 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11801 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
11802 IEM_MC_END();
11803 return VINF_SUCCESS;
11804}
11805#else
11806FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
11807#endif
11808
11809/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
11810#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11811FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
11812{
11813 IEMOP_MNEMONIC(vmptrst, "vmptrst");
11814 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
11815 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
11816 IEM_MC_BEGIN(2, 0);
11817 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11818 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11820 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11821 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11822 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
11823 IEM_MC_END();
11824 return VINF_SUCCESS;
11825}
11826#else
11827FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
11828#endif
11829
11830/** Opcode 0x0f 0xc7 11/7. */
11831FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
11832
11833
11834/**
11835 * Group 9 jump table for register variant.
11836 */
11837IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
11838{ /* pfx: none, 066h, 0f3h, 0f2h */
11839 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11840 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
11841 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11842 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11843 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11844 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11845 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11846 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11847};
11848AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
11849
11850
11851/**
11852 * Group 9 jump table for memory variant.
11853 */
11854IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
11855{ /* pfx: none, 066h, 0f3h, 0f2h */
11856 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11857 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
11858 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11859 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11860 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11861 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11862 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
11863 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11864};
11865AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
11866
11867
11868/** Opcode 0x0f 0xc7. */
11869FNIEMOP_DEF(iemOp_Grp9)
11870{
11871 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
11872 if (IEM_IS_MODRM_REG_MODE(bRm))
11873 /* register, register */
11874 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11875 + pVCpu->iem.s.idxPrefix], bRm);
11876 /* memory, register */
11877 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11878 + pVCpu->iem.s.idxPrefix], bRm);
11879}
11880
11881
11882/**
11883 * Common 'bswap register' helper.
11884 */
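/* Note: BSWAP with a 16-bit operand is architecturally undefined; real CPUs
   have commonly been observed to clear the low word. The 16-bit case below
   defers that choice to iemAImpl_bswap_u16. */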
11885FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
11886{
11887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11888 switch (pVCpu->iem.s.enmEffOpSize)
11889 {
11890 case IEMMODE_16BIT:
11891 IEM_MC_BEGIN(1, 0);
11892 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11893 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
11894 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
11895 IEM_MC_ADVANCE_RIP();
11896 IEM_MC_END();
11897 return VINF_SUCCESS;
11898
11899 case IEMMODE_32BIT:
11900 IEM_MC_BEGIN(1, 0);
11901 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11902 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
11903 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11904 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
11905 IEM_MC_ADVANCE_RIP();
11906 IEM_MC_END();
11907 return VINF_SUCCESS;
11908
11909 case IEMMODE_64BIT:
11910 IEM_MC_BEGIN(1, 0);
11911 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11912 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
11913 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
11914 IEM_MC_ADVANCE_RIP();
11915 IEM_MC_END();
11916 return VINF_SUCCESS;
11917
11918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11919 }
11920}
11921
11922
11923/** Opcode 0x0f 0xc8. */
11924FNIEMOP_DEF(iemOp_bswap_rAX_r8)
11925{
11926 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
11927 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
11928 prefix; REX.B appears to be the correct prefix. For a parallel
11929 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
11930 IEMOP_HLP_MIN_486();
11931 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
11932}
11933
11934
11935/** Opcode 0x0f 0xc9. */
11936FNIEMOP_DEF(iemOp_bswap_rCX_r9)
11937{
11938 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
11939 IEMOP_HLP_MIN_486();
11940 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
11941}
11942
11943
11944/** Opcode 0x0f 0xca. */
11945FNIEMOP_DEF(iemOp_bswap_rDX_r10)
11946{
11947 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
11948 IEMOP_HLP_MIN_486();
11949 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
11950}
11951
11952
11953/** Opcode 0x0f 0xcb. */
11954FNIEMOP_DEF(iemOp_bswap_rBX_r11)
11955{
11956 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
11957 IEMOP_HLP_MIN_486();
11958 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
11959}
11960
11961
11962/** Opcode 0x0f 0xcc. */
11963FNIEMOP_DEF(iemOp_bswap_rSP_r12)
11964{
11965 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
11966 IEMOP_HLP_MIN_486();
11967 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
11968}
11969
11970
11971/** Opcode 0x0f 0xcd. */
11972FNIEMOP_DEF(iemOp_bswap_rBP_r13)
11973{
11974 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
11975 IEMOP_HLP_MIN_486();
11976 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
11977}
11978
11979
11980/** Opcode 0x0f 0xce. */
11981FNIEMOP_DEF(iemOp_bswap_rSI_r14)
11982{
11983 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
11984 IEMOP_HLP_MIN_486();
11985 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
11986}
11987
11988
11989/** Opcode 0x0f 0xcf. */
11990FNIEMOP_DEF(iemOp_bswap_rDI_r15)
11991{
11992 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
11993 IEMOP_HLP_MIN_486();
11994 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
11995}
11996
11997
11998/* Opcode 0x0f 0xd0 - invalid */
11999
12000
12001/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
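/* A rough sketch (SSE3): Dst[0] = Dst[0] - Src[0]; Dst[1] = Dst[1] + Src[1]; */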
12002FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12003{
12004 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12005 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12006}
12007
12008
12009/* Opcode 0xf3 0x0f 0xd0 - invalid */
12010
12011
12012/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12013FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12014{
12015 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12016 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12017}
12018
12019
12020
12021/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
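/* For the psrlw/psrld/psrlq family the whole source operand is the shift
   count (not just the low bits); counts greater than the element width zero
   the destination rather than being masked. */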
12022FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12023{
12024 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12025 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12026}
12027
12028/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12029FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12030{
12031 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12032 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12033}
12034
12035/* Opcode 0xf3 0x0f 0xd1 - invalid */
12036/* Opcode 0xf2 0x0f 0xd1 - invalid */
12037
12038/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12039FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12040{
12041 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12042 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12043}
12044
12045
12046/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12047FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12048{
12049 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12050 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12051}
12052
12053
12054/* Opcode 0xf3 0x0f 0xd2 - invalid */
12055/* Opcode 0xf2 0x0f 0xd2 - invalid */
12056
12057/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12058FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12059{
12060 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12061 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12062}
12063
12064
12065/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12066FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12067{
12068 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12069 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12070}
12071
12072
12073/* Opcode 0xf3 0x0f 0xd3 - invalid */
12074/* Opcode 0xf2 0x0f 0xd3 - invalid */
12075
12076
12077/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
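/* Note: the MMX form of PADDQ was introduced with SSE2, hence the fSse2
   feature flag passed to the _Ex worker below. */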
12078FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12079{
12080 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12081 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12082}
12083
12084
12085/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12086FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12087{
12088 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12089 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12090}
12091
12092
12093/* Opcode 0xf3 0x0f 0xd4 - invalid */
12094/* Opcode 0xf2 0x0f 0xd4 - invalid */
12095
12096/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12097FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12098{
12099 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12100 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12101}
12102
12103/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12104FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12105{
12106 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12107 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12108}
12109
12110
12111/* Opcode 0xf3 0x0f 0xd5 - invalid */
12112/* Opcode 0xf2 0x0f 0xd5 - invalid */
12113
12114/* Opcode 0x0f 0xd6 - invalid */
12115
12116/**
12117 * @opcode 0xd6
12118 * @oppfx 0x66
12119 * @opcpuid sse2
12120 * @opgroup og_sse2_pcksclr_datamove
12121 * @opxcpttype none
12122 * @optest op1=-1 op2=2 -> op1=2
12123 * @optest op1=0 op2=-42 -> op1=-42
12124 */
12125FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12126{
12127 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12129 if (IEM_IS_MODRM_REG_MODE(bRm))
12130 {
12131 /*
12132 * Register, register.
12133 */
12134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12135 IEM_MC_BEGIN(0, 2);
12136 IEM_MC_LOCAL(uint64_t, uSrc);
12137
12138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12140
12141 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12142 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12143
12144 IEM_MC_ADVANCE_RIP();
12145 IEM_MC_END();
12146 }
12147 else
12148 {
12149 /*
12150 * Memory, register.
12151 */
12152 IEM_MC_BEGIN(0, 2);
12153 IEM_MC_LOCAL(uint64_t, uSrc);
12154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12155
12156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12160
12161 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12162 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12163
12164 IEM_MC_ADVANCE_RIP();
12165 IEM_MC_END();
12166 }
12167 return VINF_SUCCESS;
12168}
12169
12170
12171/**
12172 * @opcode 0xd6
12173 * @opcodesub 11 mr/reg
12174 * @oppfx f3
12175 * @opcpuid sse2
12176 * @opgroup og_sse2_simdint_datamove
12177 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12178 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12179 */
12180FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12181{
12182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12183 if (IEM_IS_MODRM_REG_MODE(bRm))
12184 {
12185 /*
12186 * Register, register.
12187 */
12188 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12190 IEM_MC_BEGIN(0, 1);
12191 IEM_MC_LOCAL(uint64_t, uSrc);
12192
12193 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12194 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12195
12196 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12197 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12198 IEM_MC_FPU_TO_MMX_MODE();
12199
12200 IEM_MC_ADVANCE_RIP();
12201 IEM_MC_END();
12202 return VINF_SUCCESS;
12203 }
12204
12205 /**
12206 * @opdone
12207 * @opmnemonic udf30fd6mem
12208 * @opcode 0xd6
12209 * @opcodesub !11 mr/reg
12210 * @oppfx f3
12211 * @opunused intel-modrm
12212 * @opcpuid sse
12213 * @optest ->
12214 */
12215 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12216}
12217
12218
12219/**
12220 * @opcode 0xd6
12221 * @opcodesub 11 mr/reg
12222 * @oppfx f2
12223 * @opcpuid sse2
12224 * @opgroup og_sse2_simdint_datamove
12225 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12226 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12227 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12228 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12229 * @optest op1=-42 op2=0xfedcba9876543210
12230 * -> op1=0xfedcba9876543210 ftw=0xff
12231 */
12232FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12233{
12234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12235 if (IEM_IS_MODRM_REG_MODE(bRm))
12236 {
12237 /*
12238 * Register, register.
12239 */
12240 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12242 IEM_MC_BEGIN(0, 1);
12243 IEM_MC_LOCAL(uint64_t, uSrc);
12244
12245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12246 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12247
12248 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12249 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12250 IEM_MC_FPU_TO_MMX_MODE();
12251
12252 IEM_MC_ADVANCE_RIP();
12253 IEM_MC_END();
12254 return VINF_SUCCESS;
12255 }
12256
12257 /**
12258 * @opdone
12259 * @opmnemonic udf20fd6mem
12260 * @opcode 0xd6
12261 * @opcodesub !11 mr/reg
12262 * @oppfx f2
12263 * @opunused intel-modrm
12264 * @opcpuid sse
12265 * @optest ->
12266 */
12267 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12268}
12269
12270
12271/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
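/* A rough sketch with illustrative names: gathers the most significant bit
   of each source byte into the low bits of the destination register,
   zeroing the rest:
       uDst = 0;
       for (unsigned i = 0; i < 8; i++)
           uDst |= ((uSrc >> (i * 8 + 7)) & 1) << i; */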
12272FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12273{
12274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12275 /* Docs say register only. */
12276 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12277 {
12278 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12279 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12281 IEM_MC_BEGIN(2, 0);
12282 IEM_MC_ARG(uint64_t *, puDst, 0);
12283 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12284 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12285 IEM_MC_PREPARE_FPU_USAGE();
12286 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12287 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12288 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12289 IEM_MC_FPU_TO_MMX_MODE();
12290 IEM_MC_ADVANCE_RIP();
12291 IEM_MC_END();
12292 return VINF_SUCCESS;
12293 }
12294 return IEMOP_RAISE_INVALID_OPCODE();
12295}
12296
12297
12298/** Opcode 0x66 0x0f 0xd7 - */
12299FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12300{
12301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12302 /* Docs say register only. */
12303 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12304 {
12305 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12306 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12308 IEM_MC_BEGIN(2, 0);
12309 IEM_MC_ARG(uint64_t *, puDst, 0);
12310 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12311 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12312 IEM_MC_PREPARE_SSE_USAGE();
12313 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12314 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12315 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12316 IEM_MC_ADVANCE_RIP();
12317 IEM_MC_END();
12318 return VINF_SUCCESS;
12319 }
12320 return IEMOP_RAISE_INVALID_OPCODE();
12321}
12322
12323
12324/* Opcode 0xf3 0x0f 0xd7 - invalid */
12325/* Opcode 0xf2 0x0f 0xd7 - invalid */
12326
12327
12328/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12329FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12330{
12331 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12332 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12333}
12334
12335
12336/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12337FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12338{
12339 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12340 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12341}
12342
12343
12344/* Opcode 0xf3 0x0f 0xd8 - invalid */
12345/* Opcode 0xf2 0x0f 0xd8 - invalid */
12346
12347/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12348FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12349{
12350 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12351 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12352}
12353
12354
12355/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12356FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12357{
12358 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12359 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12360}
12361
12362
12363/* Opcode 0xf3 0x0f 0xd9 - invalid */
12364/* Opcode 0xf2 0x0f 0xd9 - invalid */
12365
12366/** Opcode 0x0f 0xda - pminub Pq, Qq */
12367FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12368{
12369 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12370 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12371}
12372
12373
12374/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12375FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12376{
12377 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12378 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12379}
12380
12381/* Opcode 0xf3 0x0f 0xda - invalid */
12382/* Opcode 0xf2 0x0f 0xda - invalid */
12383
12384/** Opcode 0x0f 0xdb - pand Pq, Qq */
12385FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12386{
12387 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12388 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12389}
12390
12391
12392/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12393FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12394{
12395 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12396 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12397}
12398
12399
12400/* Opcode 0xf3 0x0f 0xdb - invalid */
12401/* Opcode 0xf2 0x0f 0xdb - invalid */
12402
12403/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12404FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12405{
12406 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12407 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12408}
12409
12410
12411/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12412FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12413{
12414 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12415 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12416}
12417
12418
12419/* Opcode 0xf3 0x0f 0xdc - invalid */
12420/* Opcode 0xf2 0x0f 0xdc - invalid */
12421
12422/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12423FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12424{
12425 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12426 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12427}
12428
12429
12430/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12431FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12432{
12433 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12434 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12435}
12436
12437
12438/* Opcode 0xf3 0x0f 0xdd - invalid */
12439/* Opcode 0xf2 0x0f 0xdd - invalid */
12440
12441/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12442FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12443{
12444 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12445 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12446}
12447
12448
12449/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12450FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12451{
12452 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12453 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12454}
12455
12456/* Opcode 0xf3 0x0f 0xde - invalid */
12457/* Opcode 0xf2 0x0f 0xde - invalid */
12458
12459
12460/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12461FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12462{
12463 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12464 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12465}
12466
12467
12468/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12469FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12470{
12471 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12472 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12473}
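
/* Reference sketch (illustrative only): pandn complements the destination
 * before ANDing, i.e. for the 64-bit form:
 *     *pDst = ~*pDst & *pSrc;
 */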
12474
12475
12476/* Opcode 0xf3 0x0f 0xdf - invalid */
12477/* Opcode 0xf2 0x0f 0xdf - invalid */
12478
12479/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12480FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12481{
12482 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12483 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12484}
12485
12486
12487/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12488FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12489{
12490 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12491 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12492}
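
/* Reference sketch (illustrative only): byte-wise rounding average computed
 * with a 9-bit intermediate, i.e. for each byte:
 *     uint8_t r = (uint8_t)(((unsigned)uByte1 + uByte2 + 1) >> 1);
 */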
12493
12494
12495/* Opcode 0xf3 0x0f 0xe0 - invalid */
12496/* Opcode 0xf2 0x0f 0xe0 - invalid */
12497
12498/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12499FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12500{
12501 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12502 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12503}
12504
12505
12506/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12507FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12508{
12509 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12510 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12511}
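
/* Reference sketch (illustrative only): each word is shifted right
 * arithmetically by the unsigned count taken from the low quadword of the
 * source; counts above 15 replicate the sign bit into the whole word:
 *     int16_t r = cShift > 15 ? (iWord < 0 ? -1 : 0) : (int16_t)(iWord >> cShift);
 */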
12512
12513
12514/* Opcode 0xf3 0x0f 0xe1 - invalid */
12515/* Opcode 0xf2 0x0f 0xe1 - invalid */
12516
12517/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12518FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12519{
12520 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12521 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12522}
12523
12524
12525/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12526FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12527{
12528 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12529 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12530}
12531
12532
12533/* Opcode 0xf3 0x0f 0xe2 - invalid */
12534/* Opcode 0xf2 0x0f 0xe2 - invalid */
12535
12536/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12537FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12538{
12539 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12540 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12541}
12542
12543
12544/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12545FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12546{
12547 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12548 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12549}
12550
12551
12552/* Opcode 0xf3 0x0f 0xe3 - invalid */
12553/* Opcode 0xf2 0x0f 0xe3 - invalid */
12554
12555/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12556FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12557{
12558 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12559 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
12560}
12561
12562
12563/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
12564FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
12565{
12566 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12567 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
12568}
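
/* Reference sketch (illustrative only): word-wise unsigned multiplication
 * keeping the high half of each 32-bit product:
 *     uint16_t r = (uint16_t)(((uint32_t)uWord1 * uWord2) >> 16);
 */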
12569
12570
12571/* Opcode 0xf3 0x0f 0xe4 - invalid */
12572/* Opcode 0xf2 0x0f 0xe4 - invalid */
12573
12574/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
12575FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
12576{
12577 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12578 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
12579}
12580
12581
12582/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
12583FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
12584{
12585 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12586 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
12587}
12588
12589
12590/* Opcode 0xf3 0x0f 0xe5 - invalid */
12591/* Opcode 0xf2 0x0f 0xe5 - invalid */
12592/* Opcode 0x0f 0xe6 - invalid */
12593
12594
12595/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
12596FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
12597{
12598 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12599 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
12600}
12601
12602
12603/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
12604FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
12605{
12606 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12607 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
12608}
12609
12610
12611/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
12612FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
12613{
12614 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12615 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
12616}
12617
12618
12619/**
12620 * @opcode 0xe7
12621 * @opcodesub !11 mr/reg
12622 * @oppfx none
12623 * @opcpuid sse
12624 * @opgroup og_sse1_cachect
12625 * @opxcpttype none
12626 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
12627 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12628 */
12629FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
12630{
12631 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12633 if (IEM_IS_MODRM_MEM_MODE(bRm))
12634 {
12635 /* Register, memory. */
12636 IEM_MC_BEGIN(0, 2);
12637 IEM_MC_LOCAL(uint64_t, uSrc);
12638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12639
12640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12642 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12643 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12644
12645 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
12646 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12647 IEM_MC_FPU_TO_MMX_MODE();
12648
12649 IEM_MC_ADVANCE_RIP();
12650 IEM_MC_END();
12651 return VINF_SUCCESS;
12652 }
12653 /**
12654 * @opdone
12655 * @opmnemonic ud0fe7reg
12656 * @opcode 0xe7
12657 * @opcodesub 11 mr/reg
12658 * @oppfx none
12659 * @opunused immediate
12660 * @opcpuid sse
12661 * @optest ->
12662 */
12663 return IEMOP_RAISE_INVALID_OPCODE();
12664}
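
/* Note: the non-temporal hint only affects cache allocation on real hardware,
 * so architecturally an ordinary 64-bit store - which is what the
 * IEM_MC_STORE_MEM_U64 above performs - should be an adequate emulation. */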
12665
12666/**
12667 * @opcode 0xe7
12668 * @opcodesub !11 mr/reg
12669 * @oppfx 0x66
12670 * @opcpuid sse2
12671 * @opgroup og_sse2_cachect
12672 * @opxcpttype 1
12673 * @optest op1=-1 op2=2 -> op1=2
12674 * @optest op1=0 op2=-42 -> op1=-42
12675 */
12676FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
12677{
12678 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12680 if (IEM_IS_MODRM_MEM_MODE(bRm))
12681 {
12682 /* Register, memory. */
12683 IEM_MC_BEGIN(0, 2);
12684 IEM_MC_LOCAL(RTUINT128U, uSrc);
12685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12686
12687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12690 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12691
12692 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12693 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12694
12695 IEM_MC_ADVANCE_RIP();
12696 IEM_MC_END();
12697 return VINF_SUCCESS;
12698 }
12699
12700 /**
12701 * @opdone
12702 * @opmnemonic ud660fe7reg
12703 * @opcode 0xe7
12704 * @opcodesub 11 mr/reg
12705 * @oppfx 0x66
12706 * @opunused immediate
12707 * @opcpuid sse
12708 * @optest ->
12709 */
12710 return IEMOP_RAISE_INVALID_OPCODE();
12711}
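
/* Note: unlike the movntq path above, the store here goes through
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a misaligned effective address is
 * expected to fault, the instruction requiring 16-byte alignment. */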
12712
12713/* Opcode 0xf3 0x0f 0xe7 - invalid */
12714/* Opcode 0xf2 0x0f 0xe7 - invalid */
12715
12716
12717/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
12718FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
12719{
12720 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12721 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
12722}
12723
12724
12725/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
12726FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
12727{
12728 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12729 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
12730}
12731
12732
12733/* Opcode 0xf3 0x0f 0xe8 - invalid */
12734/* Opcode 0xf2 0x0f 0xe8 - invalid */
12735
12736/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
12737FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
12738{
12739 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12740 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
12741}
12742
12743
12744/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
12745FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
12746{
12747 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12748 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
12749}
12750
12751
12752/* Opcode 0xf3 0x0f 0xe9 - invalid */
12753/* Opcode 0xf2 0x0f 0xe9 - invalid */
12754
12755
12756/** Opcode 0x0f 0xea - pminsw Pq, Qq */
12757FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
12758{
12759 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12760 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
12761}
12762
12763
12764/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
12765FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
12766{
12767 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12768 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
12769}
12770
12771
12772/* Opcode 0xf3 0x0f 0xea - invalid */
12773/* Opcode 0xf2 0x0f 0xea - invalid */
12774
12775
12776/** Opcode 0x0f 0xeb - por Pq, Qq */
12777FNIEMOP_DEF(iemOp_por_Pq_Qq)
12778{
12779 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12780 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
12781}
12782
12783
12784/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
12785FNIEMOP_DEF(iemOp_por_Vx_Wx)
12786{
12787 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12788 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
12789}
12790
12791
12792/* Opcode 0xf3 0x0f 0xeb - invalid */
12793/* Opcode 0xf2 0x0f 0xeb - invalid */
12794
12795/** Opcode 0x0f 0xec - paddsb Pq, Qq */
12796FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
12797{
12798 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12799 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
12800}
12801
12802
12803/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
12804FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
12805{
12806 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12807 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
12808}
12809
12810
12811/* Opcode 0xf3 0x0f 0xec - invalid */
12812/* Opcode 0xf2 0x0f 0xec - invalid */
12813
12814/** Opcode 0x0f 0xed - paddsw Pq, Qq */
12815FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
12816{
12817 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12818 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
12819}
12820
12821
12822/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
12823FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
12824{
12825 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12826 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
12827}
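
/* Reference sketch (illustrative only): word-wise signed addition saturating
 * to the int16_t range:
 *     int32_t iTmp = (int16_t)uWord1 + (int16_t)uWord2;
 *     int16_t r    = iTmp > INT16_MAX ? INT16_MAX
 *                  : iTmp < INT16_MIN ? INT16_MIN : (int16_t)iTmp;
 */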
12828
12829
12830/* Opcode 0xf3 0x0f 0xed - invalid */
12831/* Opcode 0xf2 0x0f 0xed - invalid */
12832
12833
12834/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
12835FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
12836{
12837 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12838 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
12839}
12840
12841
12842/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
12843FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
12844{
12845 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12846 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
12847}
12848
12849
12850/* Opcode 0xf3 0x0f 0xee - invalid */
12851/* Opcode 0xf2 0x0f 0xee - invalid */
12852
12853
12854/** Opcode 0x0f 0xef - pxor Pq, Qq */
12855FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
12856{
12857 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12858 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
12859}
12860
12861
12862/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
12863FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
12864{
12865 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12866 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
12867}
12868
12869
12870/* Opcode 0xf3 0x0f 0xef - invalid */
12871/* Opcode 0xf2 0x0f 0xef - invalid */
12872
12873/* Opcode 0x0f 0xf0 - invalid */
12874/* Opcode 0x66 0x0f 0xf0 - invalid */
12875
12876
12877/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
12878FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
12879{
12880 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12882 if (IEM_IS_MODRM_REG_MODE(bRm))
12883 {
12884 /*
12885 * Register, register - (not implemented, assuming it raises \#UD).
12886 */
12887 return IEMOP_RAISE_INVALID_OPCODE();
12888 }
12889 else
12890 {
12891 /*
12892 * Register, memory.
12893 */
12894 IEM_MC_BEGIN(0, 2);
12895 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
12896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12897
12898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12900 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
12901 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12902 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12903 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
12904
12905 IEM_MC_ADVANCE_RIP();
12906 IEM_MC_END();
12907 }
12908 return VINF_SUCCESS;
12909}
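
/* Note: lddqu (SSE3) is an unaligned 128-bit load that does not fault on
 * misalignment, which is presumably why the code above uses the plain
 * IEM_MC_FETCH_MEM_U128 rather than an aligned variant. */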
12910
12911
12912/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
12913FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
12914{
12915 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12916 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
12917}
12918
12919
12920/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
12921FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
12922{
12923 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12924 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
12925}
12926
12927
12928/* Opcode 0xf2 0x0f 0xf1 - invalid */
12929
12930/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
12931FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
12932{
12933 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12934 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
12935}
12936
12937
12938/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
12939FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
12940{
12941 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12942 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
12943}
12944
12945
12946/* Opcode 0xf2 0x0f 0xf2 - invalid */
12947
12948/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
12949FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
12950{
12951 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12952 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
12953}
12954
12955
12956/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
12957FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
12958{
12959 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12960 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
12961}
12962
12963/* Opcode 0xf2 0x0f 0xf3 - invalid */
12964
12965/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
12966FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
12967{
12968 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12969 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
12970}
12971
12972
12973/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
12974FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
12975{
12976 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12977 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
12978}
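
/* Reference sketch (illustrative only): multiplies the low (even-indexed)
 * unsigned dwords into full 64-bit products; for the MMX form:
 *     *puDst = (uint64_t)(uint32_t)*puDst * (uint32_t)*puSrc;
 */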
12979
12980
12981/* Opcode 0xf2 0x0f 0xf4 - invalid */
12982
12983/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
12984FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
12985{
12986 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12987 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
12988}
12989
12990
12991/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
12992FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
12993{
12994 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12995 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
12996}
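
/* Reference sketch (illustrative only): multiplies signed word pairs and sums
 * the adjacent products into dwords, i.e. per dword lane i:
 *     r.ai32[i] = (int32_t)ai16Dst[2 * i] * ai16Src[2 * i]
 *               + (int32_t)ai16Dst[2 * i + 1] * ai16Src[2 * i + 1];
 */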
12997
12998/* Opcode 0xf2 0x0f 0xf5 - invalid */
12999
13000/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13001FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13002{
13003 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13004 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13005}
13006
13007
13008/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13009FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13010{
13011 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13012 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13013}
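
/* Reference sketch (illustrative only): per 64-bit lane the sum of absolute
 * byte differences lands in the low word and the rest is zeroed:
 *     uint16_t uSum = 0;
 *     for (unsigned i = 0; i < 8; i++)
 *         uSum += (uint16_t)RT_ABS((int)au8Dst[i] - (int)au8Src[i]);
 */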
13014
13015
13016/* Opcode 0xf2 0x0f 0xf6 - invalid */
13017
13018/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13019FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13020/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13021FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13022/* Opcode 0xf2 0x0f 0xf7 - invalid */
13023
13024
13025/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13026FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13027{
13028 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13029 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13030}
13031
13032
13033/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13034FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13035{
13036 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13037 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13038}
13039
13040
13041/* Opcode 0xf2 0x0f 0xf8 - invalid */
13042
13043
13044/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13045FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13046{
13047 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13048 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13049}
13050
13051
13052/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13053FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13054{
13055 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13056 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13057}
13058
13059
13060/* Opcode 0xf2 0x0f 0xf9 - invalid */
13061
13062
13063/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13064FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13065{
13066 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13067 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13068}
13069
13070
13071/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13072FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13073{
13074 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13075 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13076}
13077
13078
13079/* Opcode 0xf2 0x0f 0xfa - invalid */
13080
13081
13082/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13083FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13084{
13085 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13086 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13087}
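
/* Note: psubq is an SSE2 addition to the MMX instruction set, which is why
 * the _Ex worker variant above additionally receives the fSse2 CPUID feature
 * flag to decide whether the opcode is valid. */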
13088
13089
13090/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13091FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13092{
13093 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13094 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13095}
13096
13097
13098/* Opcode 0xf2 0x0f 0xfb - invalid */
13099
13100
13101/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13102FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13103{
13104 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13105 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13106}
13107
13108
13109/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13110FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13111{
13112 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13113 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13114}
13115
13116
13117/* Opcode 0xf2 0x0f 0xfc - invalid */
13118
13119
13120/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13121FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13122{
13123 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13124 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13125}
13126
13127
13128/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13129FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13130{
13131 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13132 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13133}
13134
13135
13136/* Opcode 0xf2 0x0f 0xfd - invalid */
13137
13138
13139/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13140FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13141{
13142 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13143 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13144}
13145
13146
13147/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13148FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13149{
13150 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13151 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13152}
13153
13154
13155/* Opcode 0xf2 0x0f 0xfe - invalid */
13156
13157
13158/** Opcode **** 0x0f 0xff - UD0 */
13159FNIEMOP_DEF(iemOp_ud0)
13160{
13161 IEMOP_MNEMONIC(ud0, "ud0");
13162 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13163 {
13164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13165#ifndef TST_IEM_CHECK_MC
13166 if (IEM_IS_MODRM_MEM_MODE(bRm))
13167 {
13168 RTGCPTR GCPtrEff;
13169 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13170 if (rcStrict != VINF_SUCCESS)
13171 return rcStrict;
13172 }
13173#endif
13174 IEMOP_HLP_DONE_DECODING();
13175 }
13176 return IEMOP_RAISE_INVALID_OPCODE();
13177}
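
/* Note: on Intel CPUs ud0 consumes a ModR/M byte, so the code above fetches
 * it and, for memory forms, calculates the effective address (which may
 * itself fault) before raising the \#UD; other vendors fault on the opcode
 * bytes alone. */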
13178
13179
13180
13181/**
13182 * Two byte opcode map, first byte 0x0f.
13183 *
13184 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13185 * check if it needs updating as well when making changes.
13186 */
13187IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13188{
13189 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13190 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13191 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13192 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13193 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13194 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13195 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13196 /* 0x06 */ IEMOP_X4(iemOp_clts),
13197 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13198 /* 0x08 */ IEMOP_X4(iemOp_invd),
13199 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13200 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13201 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13202 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13203 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13204 /* 0x0e */ IEMOP_X4(iemOp_femms),
13205 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13206
13207 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13208 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13209 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13210 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13211 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13212 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13213 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13214 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13215 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13216 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13217 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13218 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13219 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13220 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13221 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13222 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13223
13224 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13225 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13226 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13227 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13228 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13229 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13230 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13231 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13232 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13233 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13234 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13235 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13236 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13237 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13238 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13239 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13240
13241 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13242 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13243 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13244 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13245 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13246 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13247 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13248 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13249 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13250 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13251 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13252 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13253 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13254 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13255 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13256 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13257
13258 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13259 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13260 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13261 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13262 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13263 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13264 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13265 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13266 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13267 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13268 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13269 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13270 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13271 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13272 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13273 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13274
13275 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13276 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13277 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13278 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13279 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13280 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13281 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13282 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13283 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13284 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13285 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13286 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13287 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13288 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13289 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13290 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13291
13292 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13293 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13294 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13295 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13296 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13297 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13298 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13299 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13300 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13301 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13302 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13303 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13304 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13305 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13306 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13307 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13308
13309 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13310 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13311 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13312 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13313 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13314 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13315 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13316 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13317
13318 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13319 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13320 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13321 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13322 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13323 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13324 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13325 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13326
13327 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13328 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13329 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13330 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13331 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13332 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13333 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13334 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13335 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13336 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13337 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13338 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13339 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13340 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13341 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13342 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13343
13344 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13345 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13346 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13347 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13348 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13349 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13350 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13351 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13352 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13353 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13354 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13355 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13356 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13357 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13358 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13359 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13360
13361 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13362 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13363 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13364 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13365 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13366 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13367 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13368 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13369 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13370 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13371 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13372 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13373 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13374 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13375 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13376 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13377
13378 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13379 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13380 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13381 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13382 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13383 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13384 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13385 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13386 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13387 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13388 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13389 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13390 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13391 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13392 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13393 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13394
13395 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13396 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13397 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13398 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13399 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13400 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13401 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13402 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13403 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13404 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13405 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13406 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13407 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13408 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13409 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13410 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13411
13412 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13413 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13414 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13415 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13416 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13417 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13418 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13419 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13420 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13421 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13422 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13423 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13424 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13425 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13426 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13427 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13428
13429 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13430 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13431 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13432 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13433 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13434 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13435 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13436 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13437 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13438 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13439 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13440 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13441 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13442 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13443 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13444 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13445
13446 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13447 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13448 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13449 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13450 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13451 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13452 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13453 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13454 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13455 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13456 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13457 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13458 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13459 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13460 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13461 /* 0xff */ IEMOP_X4(iemOp_ud0),
13462};
13463AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
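
/* Sketch of how the two-byte escape dispatcher is assumed to pick an entry
 * from this table: four entries per opcode byte, selected by the decoded
 * prefix, roughly:
 *     pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix];
 * with idxPrefix 0 = no prefix, 1 = 066h, 2 = 0f3h and 3 = 0f2h, matching the
 * column comment at the top of the table.
 */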
13464
13465/** @} */
13466