VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 96933

Last change on this file since 96933 was 96930, checked in by vboxsync, 21 months ago

VMM/IEM: Implement cvtps2pi/cvttps2pi instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 475.2 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96930 2022-09-29 09:55:19Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
38/**
39 * Common worker for MMX instructions on the form:
40 * pxxx mm1, mm2/mem64
 *
 * Decodes the ModR/M byte and invokes @a pfnU64 on the destination MMX
 * register and either a source MMX register or a 64-bit memory operand.
 * The worker is invoked via IEM_MC_CALL_MMX_AIMPL_2, i.e. it also receives
 * the FPU/FXSAVE state (contrast iemOpCommonMmxOpt_FullFull_To_Full, whose
 * worker takes just the operands).
 *
 * @param   pfnU64      The MMX worker taking two 64-bit operand pointers.
41 */
42FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
43{
 /* Fetch the ModR/M byte selecting the operands. */
44 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
45 if (IEM_IS_MODRM_REG_MODE(bRm))
46 {
47 /*
48 * Register, register.
49 */
50 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
51 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
52 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
53 IEM_MC_BEGIN(2, 0);
54 IEM_MC_ARG(uint64_t *, pDst, 0);
55 IEM_MC_ARG(uint64_t const *, pSrc, 1);
56 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
57 IEM_MC_PREPARE_FPU_USAGE();
 /* MMX instructions operate on the x87 register file, so switch it to MMX mode. */
58 IEM_MC_FPU_TO_MMX_MODE();
59
60 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
61 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
62 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
 /* The worker wrote through the reference; flag the MMX register as modified. */
63 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
64
65 IEM_MC_ADVANCE_RIP();
66 IEM_MC_END();
67 }
68 else
69 {
70 /*
71 * Register, memory.
72 */
73 IEM_MC_BEGIN(2, 2);
74 IEM_MC_ARG(uint64_t *, pDst, 0);
75 IEM_MC_LOCAL(uint64_t, uSrc);
76 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
77 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
78
 /* Effective address first: this consumes any SIB/displacement bytes, which
    is why decoding is only marked complete afterwards. */
79 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
80 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
81 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
82 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
83
84 IEM_MC_PREPARE_FPU_USAGE();
85 IEM_MC_FPU_TO_MMX_MODE();
86
87 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
88 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
89 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
90
91 IEM_MC_ADVANCE_RIP();
92 IEM_MC_END();
93 }
94 return VINF_SUCCESS;
95}
96
97
98/**
99 * Common worker for MMX instructions on the form:
100 * pxxx mm1, mm2/mem64
101 *
102 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
103 * no FXSAVE state, just the operands.
 *
 * @param   pfnU64      The worker taking two 64-bit operand pointers
 *                      (destination and const source); called via
 *                      IEM_MC_CALL_VOID_AIMPL_2.
104 */
105FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
106{
107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
108 if (IEM_IS_MODRM_REG_MODE(bRm))
109 {
110 /*
111 * Register, register.
112 */
113 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
114 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
116 IEM_MC_BEGIN(2, 0);
117 IEM_MC_ARG(uint64_t *, pDst, 0);
118 IEM_MC_ARG(uint64_t const *, pSrc, 1);
119 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
120 IEM_MC_PREPARE_FPU_USAGE();
121 IEM_MC_FPU_TO_MMX_MODE();
122
123 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
124 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
 /* No FXSAVE state for this worker, hence the plain VOID call. */
125 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
126 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
127
128 IEM_MC_ADVANCE_RIP();
129 IEM_MC_END();
130 }
131 else
132 {
133 /*
134 * Register, memory.
135 */
136 IEM_MC_BEGIN(2, 2);
137 IEM_MC_ARG(uint64_t *, pDst, 0);
138 IEM_MC_LOCAL(uint64_t, uSrc);
139 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
141
 /* Effective address calculation consumes displacement bytes; finish decoding after. */
142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
144 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
145 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
146
147 IEM_MC_PREPARE_FPU_USAGE();
148 IEM_MC_FPU_TO_MMX_MODE();
149
150 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
151 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
152 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
153
154 IEM_MC_ADVANCE_RIP();
155 IEM_MC_END();
156 }
157 return VINF_SUCCESS;
158}
159
160
161/**
162 * Common worker for MMX instructions on the form:
163 * pxxx mm1, mm2/mem64
164 * for instructions introduced with SSE.
 *
 * Identical shape to iemOpCommonMmx_FullFull_To_Full, except the exception
 * check also accepts SSE or AMD MMX-extensions CPUs (these MMX forms were
 * added together with SSE).
 *
 * @param   pfnU64      The MMX worker (receives FXSAVE state, two operands).
165 */
166FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
167{
168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
169 if (IEM_IS_MODRM_REG_MODE(bRm))
170 {
171 /*
172 * Register, register.
173 */
174 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
175 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
177 IEM_MC_BEGIN(2, 0);
178 IEM_MC_ARG(uint64_t *, pDst, 0);
179 IEM_MC_ARG(uint64_t const *, pSrc, 1);
 /* SSE-introduced MMX instruction: also OK on CPUs with only MMXEXT. */
180 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
181 IEM_MC_PREPARE_FPU_USAGE();
182 IEM_MC_FPU_TO_MMX_MODE();
183
184 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
185 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
186 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
187 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
188
189 IEM_MC_ADVANCE_RIP();
190 IEM_MC_END();
191 }
192 else
193 {
194 /*
195 * Register, memory.
196 */
197 IEM_MC_BEGIN(2, 2);
198 IEM_MC_ARG(uint64_t *, pDst, 0);
199 IEM_MC_LOCAL(uint64_t, uSrc);
200 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
202
203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
206 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
207
208 IEM_MC_PREPARE_FPU_USAGE();
209 IEM_MC_FPU_TO_MMX_MODE();
210
211 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
212 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
213 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
214
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 }
218 return VINF_SUCCESS;
219}
220
221
222/**
223 * Common worker for MMX instructions on the form:
224 * pxxx mm1, mm2/mem64
225 * for instructions introduced with SSE.
226 *
227 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
228 * no FXSAVE state, just the operands.
 *
 * @param   pfnU64      The worker (two 64-bit operand pointers, no FPU state);
 *                      called via IEM_MC_CALL_VOID_AIMPL_2.
229 */
230FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
231{
232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
233 if (IEM_IS_MODRM_REG_MODE(bRm))
234 {
235 /*
236 * Register, register.
237 */
238 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
239 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
241 IEM_MC_BEGIN(2, 0);
242 IEM_MC_ARG(uint64_t *, pDst, 0);
243 IEM_MC_ARG(uint64_t const *, pSrc, 1);
 /* SSE-introduced MMX instruction: also OK on CPUs with only MMXEXT. */
244 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
245 IEM_MC_PREPARE_FPU_USAGE();
246 IEM_MC_FPU_TO_MMX_MODE();
247
248 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
249 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
250 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
251 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
252
253 IEM_MC_ADVANCE_RIP();
254 IEM_MC_END();
255 }
256 else
257 {
258 /*
259 * Register, memory.
260 */
261 IEM_MC_BEGIN(2, 2);
262 IEM_MC_ARG(uint64_t *, pDst, 0);
263 IEM_MC_LOCAL(uint64_t, uSrc);
264 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
266
267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
269 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
270 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
271
272 IEM_MC_PREPARE_FPU_USAGE();
273 IEM_MC_FPU_TO_MMX_MODE();
274
275 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
276 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
277 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
278
279 IEM_MC_ADVANCE_RIP();
280 IEM_MC_END();
281 }
282 return VINF_SUCCESS;
283}
284
285
286/**
287 * Common worker for MMX instructions on the form:
288 * pxxx mm1, mm2/mem64
289 * that was introduced with SSE2.
 *
 * @param   pfnU64      The MMX worker (receives FXSAVE state, two operands).
 * @param   fSupported  Whether the instruction is supported by the host/guest
 *                      CPU profile; passed to the _EX exception-raising macro
 *                      so unsupported variants raise instead of executing.
290 */
291FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
292{
293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
294 if (IEM_IS_MODRM_REG_MODE(bRm))
295 {
296 /*
297 * Register, register.
298 */
299 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
300 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_BEGIN(2, 0);
303 IEM_MC_ARG(uint64_t *, pDst, 0);
304 IEM_MC_ARG(uint64_t const *, pSrc, 1);
 /* Extended check: additionally raises when @a fSupported is false. */
305 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
306 IEM_MC_PREPARE_FPU_USAGE();
307 IEM_MC_FPU_TO_MMX_MODE();
308
309 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
310 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
311 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
312 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
313
314 IEM_MC_ADVANCE_RIP();
315 IEM_MC_END();
316 }
317 else
318 {
319 /*
320 * Register, memory.
321 */
322 IEM_MC_BEGIN(2, 2);
323 IEM_MC_ARG(uint64_t *, pDst, 0);
324 IEM_MC_LOCAL(uint64_t, uSrc);
325 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
327
328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
330 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
331 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
332
333 IEM_MC_PREPARE_FPU_USAGE();
334 IEM_MC_FPU_TO_MMX_MODE();
335
336 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
337 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
338 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
339
340 IEM_MC_ADVANCE_RIP();
341 IEM_MC_END();
342 }
343 return VINF_SUCCESS;
344}
345
346
347/**
348 * Common worker for SSE2 instructions on the forms:
349 * pxxx xmm1, xmm2/mem128
350 *
351 * Proper alignment of the 128-bit operand is enforced.
352 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The worker is invoked via IEM_MC_CALL_SSE_AIMPL_2, i.e. it also receives
 * the FXSAVE state (contrast iemOpCommonSse2Opt_FullFull_To_Full).
 *
 * @param   pfnU128     The SSE worker taking two 128-bit operand pointers.
353 *
354 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
355 */
356FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
357{
358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
359 if (IEM_IS_MODRM_REG_MODE(bRm))
360 {
361 /*
362 * Register, register.
363 */
364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
365 IEM_MC_BEGIN(2, 0);
366 IEM_MC_ARG(PRTUINT128U, pDst, 0);
367 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
368 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
369 IEM_MC_PREPARE_SSE_USAGE();
370 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
371 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
372 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
373 IEM_MC_ADVANCE_RIP();
374 IEM_MC_END();
375 }
376 else
377 {
378 /*
379 * Register, memory.
380 */
381 IEM_MC_BEGIN(2, 2);
382 IEM_MC_ARG(PRTUINT128U, pDst, 0);
383 IEM_MC_LOCAL(RTUINT128U, uSrc);
384 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
386
387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
389 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 /* Aligned fetch: raises #GP for a misaligned 128-bit operand (type 4). */
390 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
391
392 IEM_MC_PREPARE_SSE_USAGE();
393 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
394 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
395
396 IEM_MC_ADVANCE_RIP();
397 IEM_MC_END();
398 }
399 return VINF_SUCCESS;
400}
401
402
403/**
404 * Common worker for SSE2 instructions on the forms:
405 * pxxx xmm1, xmm2/mem128
406 *
407 * Proper alignment of the 128-bit operand is enforced.
408 * Exceptions type 4. SSE2 cpuid checks.
409 *
410 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
411 * no FXSAVE state, just the operands.
 *
 * @param   pfnU128     The worker taking two 128-bit operand pointers; called
 *                      via IEM_MC_CALL_VOID_AIMPL_2.
412 *
413 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
414 */
415FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
416{
417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
418 if (IEM_IS_MODRM_REG_MODE(bRm))
419 {
420 /*
421 * Register, register.
422 */
423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
424 IEM_MC_BEGIN(2, 0);
425 IEM_MC_ARG(PRTUINT128U, pDst, 0);
426 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
427 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
428 IEM_MC_PREPARE_SSE_USAGE();
429 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
430 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
431 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
432 IEM_MC_ADVANCE_RIP();
433 IEM_MC_END();
434 }
435 else
436 {
437 /*
438 * Register, memory.
439 */
440 IEM_MC_BEGIN(2, 2);
441 IEM_MC_ARG(PRTUINT128U, pDst, 0);
442 IEM_MC_LOCAL(RTUINT128U, uSrc);
443 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
445
446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
448 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
449 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
450
451 IEM_MC_PREPARE_SSE_USAGE();
452 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
453 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
454
455 IEM_MC_ADVANCE_RIP();
456 IEM_MC_END();
457 }
458 return VINF_SUCCESS;
459}
460
461
462/**
463 * Common worker for MMX instructions on the forms:
464 * pxxxx mm1, mm2/mem32
465 *
466 * The 2nd operand is the first half of a register, which in the memory case
467 * means a 32-bit memory access.
 *
 * @param   pfnU64      The worker taking two 64-bit operand pointers, no FPU
 *                      state (called via IEM_MC_CALL_VOID_AIMPL_2).
468 */
469FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
470{
471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
472 if (IEM_IS_MODRM_REG_MODE(bRm))
473 {
474 /*
475 * Register, register.
476 */
477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
478 IEM_MC_BEGIN(2, 0);
479 IEM_MC_ARG(uint64_t *, puDst, 0);
480 IEM_MC_ARG(uint64_t const *, puSrc, 1);
481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
482 IEM_MC_PREPARE_FPU_USAGE();
483 IEM_MC_FPU_TO_MMX_MODE();
484
 /* The full 64-bit register is referenced; the worker only uses the low half. */
485 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
486 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
487 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
488 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
489
490 IEM_MC_ADVANCE_RIP();
491 IEM_MC_END();
492 }
493 else
494 {
495 /*
496 * Register, memory.
497 */
498 IEM_MC_BEGIN(2, 2);
499 IEM_MC_ARG(uint64_t *, puDst, 0);
500 IEM_MC_LOCAL(uint64_t, uSrc);
501 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
506 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
 /* Only 32 bits are read from memory, zero-extended into the 64-bit local. */
507 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
508
509 IEM_MC_PREPARE_FPU_USAGE();
510 IEM_MC_FPU_TO_MMX_MODE();
511
512 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
513 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
514 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
515
516 IEM_MC_ADVANCE_RIP();
517 IEM_MC_END();
518 }
519 return VINF_SUCCESS;
520}
521
522
523/**
524 * Common worker for SSE instructions on the forms:
525 * pxxxx xmm1, xmm2/mem128
526 *
527 * The 2nd operand is the first half of a register, which in the memory case
528 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
529 *
530 * Exceptions type 4.
 *
 * @param   pfnU128     The worker taking two 128-bit operand pointers, no FPU
 *                      state (called via IEM_MC_CALL_VOID_AIMPL_2).
 *
 * NOTE(review): this worker uses IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE where
 * the sibling workers use IEM_MC_PREPARE_SSE_USAGE — verify intentional.
531 */
532FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
533{
534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
535 if (IEM_IS_MODRM_REG_MODE(bRm))
536 {
537 /*
538 * Register, register.
539 */
540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
541 IEM_MC_BEGIN(2, 0);
542 IEM_MC_ARG(PRTUINT128U, puDst, 0);
543 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
546 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
547 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
548 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
549 IEM_MC_ADVANCE_RIP();
550 IEM_MC_END();
551 }
552 else
553 {
554 /*
555 * Register, memory.
556 */
557 IEM_MC_BEGIN(2, 2);
558 IEM_MC_ARG(PRTUINT128U, puDst, 0);
559 IEM_MC_LOCAL(RTUINT128U, uSrc);
560 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
562
563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
566 /** @todo Most CPUs probably only read the low qword. We read everything to
567 * make sure we apply segmentation and alignment checks correctly.
568 * When we have time, it would be interesting to explore what real
569 * CPUs actually does and whether it will do a TLB load for the high
570 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
571 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
572
573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
574 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
575 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
576
577 IEM_MC_ADVANCE_RIP();
578 IEM_MC_END();
579 }
580 return VINF_SUCCESS;
581}
582
583
584/**
585 * Common worker for SSE2 instructions on the forms:
586 * pxxxx xmm1, xmm2/mem128
587 *
588 * The 2nd operand is the first half of a register, which in the memory case
589 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
590 *
591 * Exceptions type 4.
 *
 * Same shape as iemOpCommonSse_LowLow_To_Full, but raises SSE2-related
 * exceptions instead of SSE ones.
 *
 * @param   pfnU128     The worker taking two 128-bit operand pointers, no FPU
 *                      state (called via IEM_MC_CALL_VOID_AIMPL_2).
592 */
593FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
594{
595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
596 if (IEM_IS_MODRM_REG_MODE(bRm))
597 {
598 /*
599 * Register, register.
600 */
601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
602 IEM_MC_BEGIN(2, 0);
603 IEM_MC_ARG(PRTUINT128U, puDst, 0);
604 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
607 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
608 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
609 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
610 IEM_MC_ADVANCE_RIP();
611 IEM_MC_END();
612 }
613 else
614 {
615 /*
616 * Register, memory.
617 */
618 IEM_MC_BEGIN(2, 2);
619 IEM_MC_ARG(PRTUINT128U, puDst, 0);
620 IEM_MC_LOCAL(RTUINT128U, uSrc);
621 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
623
624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
626 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
627 /** @todo Most CPUs probably only read the low qword. We read everything to
628 * make sure we apply segmentation and alignment checks correctly.
629 * When we have time, it would be interesting to explore what real
630 * CPUs actually does and whether it will do a TLB load for the high
631 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
632 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
633
634 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
635 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
636 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
637
638 IEM_MC_ADVANCE_RIP();
639 IEM_MC_END();
640 }
641 return VINF_SUCCESS;
642}
643
644
645/**
646 * Common worker for MMX instructions on the form:
647 * pxxxx mm1, mm2/mem64
648 *
649 * The 2nd operand is the second half of a register, which in the memory case
650 * means a 64-bit memory access for MMX.
 *
 * @param   pfnU64      The worker taking two 64-bit operand pointers, no FPU
 *                      state (called via IEM_MC_CALL_VOID_AIMPL_2).
651 */
652FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
653{
654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
655 if (IEM_IS_MODRM_REG_MODE(bRm))
656 {
657 /*
658 * Register, register.
659 */
660 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
661 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
663 IEM_MC_BEGIN(2, 0);
664 IEM_MC_ARG(uint64_t *, puDst, 0);
665 IEM_MC_ARG(uint64_t const *, puSrc, 1);
666 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
667 IEM_MC_PREPARE_FPU_USAGE();
668 IEM_MC_FPU_TO_MMX_MODE();
669
670 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
671 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
672 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
673 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
674
675 IEM_MC_ADVANCE_RIP();
676 IEM_MC_END();
677 }
678 else
679 {
680 /*
681 * Register, memory.
682 */
683 IEM_MC_BEGIN(2, 2);
684 IEM_MC_ARG(uint64_t *, puDst, 0);
685 IEM_MC_LOCAL(uint64_t, uSrc);
686 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
688
689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
691 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
692 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
693
694 IEM_MC_PREPARE_FPU_USAGE();
695 IEM_MC_FPU_TO_MMX_MODE();
696
697 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
698 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
699 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
700
701 IEM_MC_ADVANCE_RIP();
702 IEM_MC_END();
703 }
704 return VINF_SUCCESS;
705}
706
707
708/**
709 * Common worker for SSE instructions on the form:
710 * pxxxx xmm1, xmm2/mem128
711 *
712 * The 2nd operand is the second half of a register, which for SSE a 128-bit
713 * aligned access where it may read the full 128 bits or only the upper 64 bits.
714 *
715 * Exceptions type 4.
 *
 * @param   pfnU128     The worker taking two 128-bit operand pointers, no FPU
 *                      state (called via IEM_MC_CALL_VOID_AIMPL_2).
716 */
717FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
718{
719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
720 if (IEM_IS_MODRM_REG_MODE(bRm))
721 {
722 /*
723 * Register, register.
724 */
725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
726 IEM_MC_BEGIN(2, 0);
727 IEM_MC_ARG(PRTUINT128U, puDst, 0);
728 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
729 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
730 IEM_MC_PREPARE_SSE_USAGE();
731 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
732 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
733 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
734 IEM_MC_ADVANCE_RIP();
735 IEM_MC_END();
736 }
737 else
738 {
739 /*
740 * Register, memory.
741 */
742 IEM_MC_BEGIN(2, 2);
743 IEM_MC_ARG(PRTUINT128U, puDst, 0);
744 IEM_MC_LOCAL(RTUINT128U, uSrc);
745 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
747
748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
750 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
751 /** @todo Most CPUs probably only read the high qword. We read everything to
752 * make sure we apply segmentation and alignment checks correctly.
753 * When we have time, it would be interesting to explore what real
754 * CPUs actually does and whether it will do a TLB load for the lower
755 * part or skip any associated \#PF. */
756 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
757
758 IEM_MC_PREPARE_SSE_USAGE();
759 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
760 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
761
762 IEM_MC_ADVANCE_RIP();
763 IEM_MC_END();
764 }
765 return VINF_SUCCESS;
766}
767
768
769/**
770 * Common worker for SSE instructions on the forms:
771 * pxxs xmm1, xmm2/mem128
772 *
773 * Proper alignment of the 128-bit operand is enforced.
774 * Exceptions type 2. SSE cpuid checks.
 *
 * Floating-point variant: the worker produces an IEMSSERESULT (value plus
 * MXCSR), which is stored back to the destination register and may raise a
 * SIMD floating-point exception afterwards.
 *
 * @param   pfnU128     The SSE FP worker taking a result buffer and two
 *                      128-bit source operand pointers.
775 *
776 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
777 */
778FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
779{
780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
781 if (IEM_IS_MODRM_REG_MODE(bRm))
782 {
783 /*
784 * Register, register.
785 */
786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
787 IEM_MC_BEGIN(3, 1);
788 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
789 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
790 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
791 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
793 IEM_MC_PREPARE_SSE_USAGE();
 /* Both sources are const; the result goes through SseRes, not in place. */
794 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
795 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
796 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
797 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
 /* Raise #XF/#UD after the store if the worker flagged an FP exception. */
798 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
799
800 IEM_MC_ADVANCE_RIP();
801 IEM_MC_END();
802 }
803 else
804 {
805 /*
806 * Register, memory.
807 */
808 IEM_MC_BEGIN(3, 2);
809 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
810 IEM_MC_LOCAL(X86XMMREG, uSrc2);
811 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
812 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
813 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
815
816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
818 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
819 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
820
821 IEM_MC_PREPARE_SSE_USAGE();
822 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
823 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
824 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
825 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
826
827 IEM_MC_ADVANCE_RIP();
828 IEM_MC_END();
829 }
830 return VINF_SUCCESS;
831}
832
833
834/**
835 * Common worker for SSE instructions on the forms:
836 * pxxs xmm1, xmm2/mem32
837 *
838 * Proper alignment of the 128-bit operand is enforced.
839 * Exceptions type 2. SSE cpuid checks.
 *
 * Scalar single-precision variant: the 2nd operand is the low 32-bit float of
 * a register or a 32-bit memory operand (fetched via IEM_MC_FETCH_MEM_R32).
 * NOTE(review): the memory form performs a plain 32-bit fetch, so no 128-bit
 * alignment is enforced here despite the note above — scalar forms are
 * normally exception type 3; verify against the SDM.
 *
 * @param   pfnU128_R32 The SSE FP worker taking a result buffer, a 128-bit
 *                      first source and a 32-bit float second source.
840 *
841 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
842 */
843FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
844{
845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
846 if (IEM_IS_MODRM_REG_MODE(bRm))
847 {
848 /*
849 * Register, register.
850 */
851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
852 IEM_MC_BEGIN(3, 1);
853 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
854 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
855 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
856 IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
857 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
858 IEM_MC_PREPARE_SSE_USAGE();
859 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
 /* Reference only the low 32-bit float of the source register. */
860 IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
861 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
862 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
863 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
864
865 IEM_MC_ADVANCE_RIP();
866 IEM_MC_END();
867 }
868 else
869 {
870 /*
871 * Register, memory.
872 */
873 IEM_MC_BEGIN(3, 2);
874 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
875 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
876 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
877 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
878 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
880
881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
883 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
884 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
885
886 IEM_MC_PREPARE_SSE_USAGE();
887 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
888 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
889 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
890 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
891
892 IEM_MC_ADVANCE_RIP();
893 IEM_MC_END();
894 }
895 return VINF_SUCCESS;
896}
897
898
899/**
900 * Common worker for SSE2 instructions on the forms:
901 * pxxd xmm1, xmm2/mem128
902 *
903 * Proper alignment of the 128-bit operand is enforced.
904 * Exceptions type 2. SSE2 cpuid checks (the code below raises SSE2-related
 * exceptions; the comment previously said "SSE").
905 *
 * Double-precision counterpart of iemOpCommonSseFp_FullFull_To_Full: the
 * worker produces an IEMSSERESULT which is stored back to the destination
 * register, possibly followed by a SIMD FP exception.
 *
 * @param   pfnU128     The SSE FP worker taking a result buffer and two
 *                      128-bit source operand pointers.
 *
906 * @sa iemOpCommonSseFp_FullFull_To_Full
907 */
908FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
909{
910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
911 if (IEM_IS_MODRM_REG_MODE(bRm))
912 {
913 /*
914 * Register, register.
915 */
916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
917 IEM_MC_BEGIN(3, 1);
918 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
919 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
920 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
921 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
922 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
923 IEM_MC_PREPARE_SSE_USAGE();
924 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
925 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
926 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
927 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
928 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
929
930 IEM_MC_ADVANCE_RIP();
931 IEM_MC_END();
932 }
933 else
934 {
935 /*
936 * Register, memory.
937 */
938 IEM_MC_BEGIN(3, 2);
939 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
940 IEM_MC_LOCAL(X86XMMREG, uSrc2);
941 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
942 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
943 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
945
946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
948 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
949 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
950
951 IEM_MC_PREPARE_SSE_USAGE();
952 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
953 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
954 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
956
957 IEM_MC_ADVANCE_RIP();
958 IEM_MC_END();
959 }
960 return VINF_SUCCESS;
961}
962
963
964/**
965 * Common worker for SSE2 instructions on the forms:
966 * pxxs xmm1, xmm2/mem64
967 *
 * Scalar double-precision variant: the 2nd operand is the low 64-bit double
 * of a register or a 64-bit memory operand (plain IEM_MC_FETCH_MEM_R64, so
 * no 128-bit alignment check applies to the memory form).
968 * Exceptions type 2. SSE2 cpuid checks.
969 *
 * Fix: the exception checks below used IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT,
 * which would let these SSE2 scalar-double instructions execute on an
 * SSE-only CPU. The function doc ("SSE2 cpuid checks") and the sibling
 * worker iemOpCommonSse2Fp_FullFull_To_Full both require the SSE2 check,
 * so both paths now use IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT.
 *
 * @param   pfnU128_R64 The SSE FP worker taking a result buffer, a 128-bit
 *                      first source and a 64-bit double second source.
 *
970 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
971 */
972FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
973{
974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
975 if (IEM_IS_MODRM_REG_MODE(bRm))
976 {
977 /*
978 * Register, register.
979 */
980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
981 IEM_MC_BEGIN(3, 1);
982 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
983 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
984 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
985 IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
 /* SSE2 check (was the SSE variant, see function doc). */
986 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
987 IEM_MC_PREPARE_SSE_USAGE();
988 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
 /* Reference only the low 64-bit double of the source register. */
989 IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
990 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
991 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
992 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
993
994 IEM_MC_ADVANCE_RIP();
995 IEM_MC_END();
996 }
997 else
998 {
999 /*
1000 * Register, memory.
1001 */
1002 IEM_MC_BEGIN(3, 2);
1003 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1004 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
1005 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1006 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1007 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
1008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1009
1010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* SSE2 check (was the SSE variant, see function doc). */
1012 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1013 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1014
1015 IEM_MC_PREPARE_SSE_USAGE();
1016 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1017 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
1018 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1019 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1020
1021 IEM_MC_ADVANCE_RIP();
1022 IEM_MC_END();
1023 }
1024 return VINF_SUCCESS;
1025}
1027
1028
1029/**
1030 * Common worker for SSE2 instructions on the form:
1031 * pxxxx xmm1, xmm2/mem128
1032 *
1033 * The 2nd operand is the second half of a register, which for SSE a 128-bit
1034 * aligned access where it may read the full 128 bits or only the upper 64 bits.
1035 *
1036 * Exceptions type 4.
1037 */
1038FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
1039{
1040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1041 if (IEM_IS_MODRM_REG_MODE(bRm))
1042 {
1043 /*
1044 * Register, register.
1045 */
1046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1047 IEM_MC_BEGIN(2, 0);
1048 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1049 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1050 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1051 IEM_MC_PREPARE_SSE_USAGE();
1052 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1053 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1054 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1055 IEM_MC_ADVANCE_RIP();
1056 IEM_MC_END();
1057 }
1058 else
1059 {
1060 /*
1061 * Register, memory.
1062 */
1063 IEM_MC_BEGIN(2, 2);
1064 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1065 IEM_MC_LOCAL(RTUINT128U, uSrc);
1066 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1068
1069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1072 /** @todo Most CPUs probably only read the high qword. We read everything to
1073 * make sure we apply segmentation and alignment checks correctly.
1074 * When we have time, it would be interesting to explore what real
1075 * CPUs actually does and whether it will do a TLB load for the lower
1076 * part or skip any associated \#PF. */
1077 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1078
1079 IEM_MC_PREPARE_SSE_USAGE();
1080 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1081 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1082
1083 IEM_MC_ADVANCE_RIP();
1084 IEM_MC_END();
1085 }
1086 return VINF_SUCCESS;
1087}
1088
1089
1090/**
1091 * Common worker for SSE3 instructions on the forms:
1092 * hxxx xmm1, xmm2/mem128
1093 *
1094 * Proper alignment of the 128-bit operand is enforced.
1095 * Exceptions type 2. SSE3 cpuid checks.
1096 *
1097 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
1098 */
1099FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
1100{
1101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1102 if (IEM_IS_MODRM_REG_MODE(bRm))
1103 {
1104 /*
1105 * Register, register.
1106 */
1107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1108 IEM_MC_BEGIN(3, 1);
1109 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1110 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1111 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1112 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
1113 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1114 IEM_MC_PREPARE_SSE_USAGE();
1115 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1116 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1117 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1118 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1119 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1120
1121 IEM_MC_ADVANCE_RIP();
1122 IEM_MC_END();
1123 }
1124 else
1125 {
1126 /*
1127 * Register, memory.
1128 */
1129 IEM_MC_BEGIN(3, 2);
1130 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1131 IEM_MC_LOCAL(X86XMMREG, uSrc2);
1132 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1133 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1134 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
1135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1136
1137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1139 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1140 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1141
1142 IEM_MC_PREPARE_SSE_USAGE();
1143 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1144 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1145 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1146 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1147
1148 IEM_MC_ADVANCE_RIP();
1149 IEM_MC_END();
1150 }
1151 return VINF_SUCCESS;
1152}
1153
1154
/** Opcode 0x0f 0x00 /0 - sldt (store local descriptor table register). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects which part of the GPR is written. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1179
1180
/** Opcode 0x0f 0x00 /1 - str (store task register). */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects which part of the GPR is written. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1206
1207
/** Opcode 0x0f 0x00 /2 - lldt (load local descriptor table register). */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check precedes the memory fetch here; ordering vs. #PF untested. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1238
1239
/** Opcode 0x0f 0x00 /3 - ltr (load task register). */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check precedes the memory fetch here; ordering vs. #PF untested. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1270
1271
1272/** Opcode 0x0f 0x00 /3. */
1273FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1274{
1275 IEMOP_HLP_MIN_286();
1276 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1277
1278 if (IEM_IS_MODRM_REG_MODE(bRm))
1279 {
1280 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1281 IEM_MC_BEGIN(2, 0);
1282 IEM_MC_ARG(uint16_t, u16Sel, 0);
1283 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1284 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1285 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1286 IEM_MC_END();
1287 }
1288 else
1289 {
1290 IEM_MC_BEGIN(2, 1);
1291 IEM_MC_ARG(uint16_t, u16Sel, 0);
1292 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1295 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1296 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1297 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1298 IEM_MC_END();
1299 }
1300 return VINF_SUCCESS;
1301}
1302
1303
/** Opcode 0x0f 0x00 /4 - verr (verify segment readable). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false: check the selector for readability. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1311
1312
/** Opcode 0x0f 0x00 /5 - verw (verify segment writable). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=true: check the selector for writability. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1320
1321
1322/**
1323 * Group 6 jump table.
1324 */
1325IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
1326{
1327 iemOp_Grp6_sldt,
1328 iemOp_Grp6_str,
1329 iemOp_Grp6_lldt,
1330 iemOp_Grp6_ltr,
1331 iemOp_Grp6_verr,
1332 iemOp_Grp6_verw,
1333 iemOp_InvalidWithRM,
1334 iemOp_InvalidWithRM
1335};
1336
/** Opcode 0x0f 0x00 - dispatches Group 6 on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1343
1344
/** Opcode 0x0f 0x01 /0 (memory form) - sgdt (store GDTR). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1361
1362
/** Opcode 0x0f 0x01 0xc1 (register form of /0) - vmcall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1375
1376
/** Opcode 0x0f 0x01 0xc2 (register form of /0) - vmlaunch.
 * Only implemented with nested VMX hardware virtualization; otherwise a stub
 * that raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1394
1395
/** Opcode 0x0f 0x01 0xc3 (register form of /0) - vmresume.
 * Only implemented with nested VMX hardware virtualization; otherwise a stub
 * that raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1413
1414
/** Opcode 0x0f 0x01 0xc4 (register form of /0) - vmxoff.
 * Only implemented with nested VMX hardware virtualization; otherwise a stub
 * that raises \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1432
1433
/** Opcode 0x0f 0x01 /1 (memory form) - sidt (store IDTR). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1450
1451
/** Opcode 0x0f 0x01 0xc8 (register form of /1) - monitor. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    /* The effective segment matters because monitor takes an address in DS:rAX by default. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1459
1460
/** Opcode 0x0f 0x01 0xc9 (register form of /1) - mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1468
1469
/** Opcode 0x0f 0x01 /2 (memory form) - lgdt (load GDTR). */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1486
1487
/** Opcode 0x0f 0x01 0xd0 - xgetbv. Raises \#UD unless the guest CPU
 *  advertises XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1502
1503
/** Opcode 0x0f 0x01 0xd1 - xsetbv. Raises \#UD unless the guest CPU
 *  advertises XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1518
1519
/** Opcode 0x0f 0x01 /3 (memory form) - lidt (load IDTR). */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is forced to 64-bit regardless of prefixes. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1538
1539
/** Opcode 0x0f 0x01 0xd8 - vmrun (AMD SVM). Stub raising \#UD unless built
 *  with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1551
/** Opcode 0x0f 0x01 0xd9 - vmmcall (AMD hypercall). Always decoded, no SVM
 *  feature gate (see note below). */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1564
/** Opcode 0x0f 0x01 0xda - vmload (AMD SVM). Stub raising \#UD unless built
 *  with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1576
1577
/** Opcode 0x0f 0x01 0xdb - vmsave (AMD SVM). Stub raising \#UD unless built
 *  with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1589
1590
/** Opcode 0x0f 0x01 0xdc - stgi (AMD SVM, set global interrupt flag). Stub
 *  raising \#UD unless built with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1602
1603
/** Opcode 0x0f 0x01 0xdd - clgi (AMD SVM, clear global interrupt flag). Stub
 *  raising \#UD unless built with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1615
1616
/** Opcode 0x0f 0x01 0xdf - invlpga (AMD SVM). Stub raising \#UD unless built
 *  with nested SVM hardware virtualization. (Listed before 0xde/skinit in
 *  this file; dispatch order is handled by iemOp_Grp7.) */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1628
1629
/** Opcode 0x0f 0x01 0xde - skinit (AMD SVM secure init). Stub raising \#UD
 *  unless built with nested SVM hardware virtualization. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1641
1642
/** Opcode 0x0f 0x01 /4 - smsw (store machine status word, i.e. CR0 low bits). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects which part of the GPR is written. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1665
1666
/** Opcode 0x0f 0x01 /6 - lmsw (load machine status word). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        /* NIL_RTGCPTR marks the register form for the common CIMPL worker. */
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1697
1698
/** Opcode 0x0f 0x01 /7 (memory form) - invlpg (invalidate TLB entry). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1712
1713
/** Opcode 0x0f 0x01 0xf8 (register form of /7) - swapgs (64-bit mode only). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1722
1723
/** Opcode 0x0f 0x01 0xf9 (register form of /7) - rdtscp. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1731
1732
1733/**
1734 * Group 7 jump table, memory variant.
1735 */
1736IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1737{
1738 iemOp_Grp7_sgdt,
1739 iemOp_Grp7_sidt,
1740 iemOp_Grp7_lgdt,
1741 iemOp_Grp7_lidt,
1742 iemOp_Grp7_smsw,
1743 iemOp_InvalidWithRM,
1744 iemOp_Grp7_lmsw,
1745 iemOp_Grp7_invlpg
1746};
1747
1748
/** Opcode 0x0f 0x01 - Group 7 dispatcher.
 *
 * Memory forms go through g_apfnGroup7Mem; register forms are decoded here by
 * the reg field, with the rm field selecting the individual instruction where
 * the register encoding space is reused (vmcall/monitor/xgetbv/SVM/swapgs...).
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* 0xc1..0xc4: VMX instructions. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* 0xc8/0xc9: monitor/mwait. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* 0xd0/0xd1: xgetbv/xsetbv. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* 0xd8..0xdf: AMD SVM instructions. All rm values covered, so
                   the missing break is unreachable. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* 0xf8/0xf9: swapgs/rdtscp. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1818
/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03); see the two callers
 *  below. (The original "/3" comment looked like a copy/paste slip.)
 *
 * Fetches the 16-bit selector (register or memory) and hands off to the
 * CIMPL worker for the access-rights / limit lookup; fIsLar selects between
 * the two instructions. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the 64-bit destination reference and the
               u64 CIMPL worker; presumably the worker handles the 32-bit
               truncation/zero-extension itself - TODO confirm. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1912
1913
1914
/** Opcode 0x0f 0x02 - lar (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    /* fIsLar=true selects the access-rights variant in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1921
1922
/** Opcode 0x0f 0x03 - lsl (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    /* fIsLar=false selects the segment-limit variant in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1929
1930
/** Opcode 0x0f 0x05 - syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1938
1939
/** Opcode 0x0f 0x06 - clts (clear CR0.TS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1947
1948
/** Opcode 0x0f 0x07 - sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1956
1957
/** Opcode 0x0f 0x08 - invd (invalidate caches without writeback). */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
1966
1967
/** Opcode 0x0f 0x09 - wbinvd (write back and invalidate caches). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1976
1977
/** Opcode 0x0f 0x0b - ud2 (guaranteed invalid opcode). */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1984
/** Opcode 0x0f 0x0d - AMD prefetch group (Group P). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms are invalid; only memory operands make sense for prefetch. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address (for faults/traps), but otherwise do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2025
2026
2027/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leave MMX mode: switch the FPU state back to x87 (tag word etc.). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2042
2043
2044/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! is gated on the CPUID feature flag; #UD when absent. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    /* The sub-opcode byte follows the ModR/M operands for 3DNow! instructions. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2062
2063
2064/**
2065 * @opcode 0x10
2066 * @oppfx none
2067 * @opcpuid sse
2068 * @opgroup og_sse_simdfp_datamove
2069 * @opxcpttype 4UA
2070 * @optest op1=1 op2=2 -> op1=2
2071 * @optest op1=0 op2=-22 -> op1=-22
2072 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Unaligned fetch - movups does not #GP on misaligned accesses. */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2114
2115
2116/**
2117 * @opcode 0x10
2118 * @oppfx 0x66
2119 * @opcpuid sse2
2120 * @opgroup og_sse2_pcksclr_datamove
2121 * @opxcpttype 4UA
2122 * @optest op1=1 op2=2 -> op1=2
2123 * @optest op1=0 op2=-42 -> op1=-42
2124 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Unaligned fetch - movupd does not #GP on misaligned accesses. */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2165
2166
2167/**
2168 * @opcode 0x10
2169 * @oppfx 0xf3
2170 * @opcpuid sse
2171 * @opgroup og_sse_simdfp_datamove
2172 * @opxcpttype 5
2173 * @optest op1=1 op2=2 -> op1=2
2174 * @optest op1=0 op2=-22 -> op1=-22
2175 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Reg-reg form merges: only the low dword is written, bits 32..127 of the destination are preserved. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory form zero-extends the dword into the full 128-bit register. */
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2219
2220
2221/**
2222 * @opcode 0x10
2223 * @oppfx 0xf2
2224 * @opcpuid sse2
2225 * @opgroup og_sse2_pcksclr_datamove
2226 * @opxcpttype 5
2227 * @optest op1=1 op2=2 -> op1=2
2228 * @optest op1=0 op2=-42 -> op1=-42
2229 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Reg-reg form merges: only the low qword is written, the high qword of the destination is preserved. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Memory form zero-extends the qword into the full 128-bit register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2273
2274
2275/**
2276 * @opcode 0x11
2277 * @oppfx none
2278 * @opcpuid sse
2279 * @opgroup og_sse_simdfp_datamove
2280 * @opxcpttype 4UA
2281 * @optest op1=1 op2=2 -> op1=2
2282 * @optest op1=0 op2=-42 -> op1=-42
2283 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: R/M field is the destination register, REG field the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only reading the XMM register here, so READ (not CHANGE) actualization suffices. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2324
2325
2326/**
2327 * @opcode 0x11
2328 * @oppfx 0x66
2329 * @opcpuid sse2
2330 * @opgroup og_sse2_pcksclr_datamove
2331 * @opxcpttype 4UA
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: R/M field is the destination register, REG field the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Only reading the XMM register here, so READ (not CHANGE) actualization suffices. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2375
2376
2377/**
2378 * @opcode 0x11
2379 * @oppfx 0xf3
2380 * @opcpuid sse
2381 * @opgroup og_sse_simdfp_datamove
2382 * @opxcpttype 5
2383 * @optest op1=1 op2=2 -> op1=2
2384 * @optest op1=0 op2=-22 -> op1=-22
2385 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Reg-reg form merges: only the low dword of the destination register is written. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Store only the low dword to memory. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2429
2430
2431/**
2432 * @opcode 0x11
2433 * @oppfx 0xf2
2434 * @opcpuid sse2
2435 * @opgroup og_sse2_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=1 op2=2 -> op1=2
2438 * @optest op1=0 op2=-42 -> op1=-42
2439 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Reg-reg form merges: only the low qword of the destination register is written. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Store only the low qword to memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2483
2484
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* movhlps: high qword of source -> low qword of destination; dest high qword preserved. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        /* NOTE(review): sibling mem forms use the Vq_WO (write-only) operand style; 'Vq' here looks
           like an oversight in the mnemonic annotation - confirm against the op-annotation tooling. */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* movlps: qword from memory -> low qword of destination; dest high qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2547
2548
2549/**
2550 * @opcode 0x12
2551 * @opcodesub !11 mr/reg
2552 * @oppfx 0x66
2553 * @opcpuid sse2
2554 * @opgroup og_sse2_pcksclr_datamove
2555 * @opxcpttype 5
2556 * @optest op1=1 op2=2 -> op1=2
2557 * @optest op1=0 op2=-42 -> op1=-42
2558 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Qword from memory -> low qword of destination; dest high qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* Register form of 66 0F 12 is undefined -> #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2595
2596
2597/**
2598 * @opcode 0x12
2599 * @oppfx 0xf3
2600 * @opcpuid sse3
2601 * @opgroup og_sse3_pcksclr_datamove
2602 * @opxcpttype 4
2603 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2604 * op1=0x00000002000000020000000100000001
2605 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplication of the even (low) dwords is done by the assembly worker. */
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Aligned fetch: 16-byte alignment is enforced for this memory form (#GP on misalignment). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2655
2656
2657/**
2658 * @opcode 0x12
2659 * @oppfx 0xf2
2660 * @opcpuid sse3
2661 * @opgroup og_sse3_pcksclr_datamove
2662 * @opxcpttype 5
2663 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2664 * op1=0x22222222111111112222222211111111
2665 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Only the low qword of the source is needed; the worker duplicates it into both halves. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* 8-byte load; no 16-byte alignment requirement for movddup's memory form. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2714
2715
2716/**
2717 * @opcode 0x13
2718 * @opcodesub !11 mr/reg
2719 * @oppfx none
2720 * @opcpuid sse
2721 * @opgroup og_sse_simdfp_datamove
2722 * @opxcpttype 5
2723 * @optest op1=1 op2=2 -> op1=2
2724 * @optest op1=0 op2=-42 -> op1=-42
2725 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Store the low qword of the XMM register to memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* Register form of 0F 13 is undefined -> #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2762
2763
2764/**
2765 * @opcode 0x13
2766 * @opcodesub !11 mr/reg
2767 * @oppfx 0x66
2768 * @opcpuid sse2
2769 * @opgroup og_sse2_pcksclr_datamove
2770 * @opxcpttype 5
2771 * @optest op1=1 op2=2 -> op1=2
2772 * @optest op1=0 op2=-42 -> op1=-42
2773 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Store the low qword of the XMM register to memory. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* Register form of 66 0F 13 is undefined -> #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2809
2810
2811/**
2812 * @opmnemonic udf30f13
2813 * @opcode 0x13
2814 * @oppfx 0xf3
2815 * @opunused intel-modrm
2816 * @opcpuid sse
2817 * @optest ->
2818 * @opdone
2819 */
2820
2821/**
2822 * @opmnemonic udf20f13
2823 * @opcode 0x13
2824 * @oppfx 0xf2
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Decoding and operand handling are shared with the other low-to-full SSE unpack forms. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2837
2838
2839/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* SSE2 variant: the common worker performs the SSE2 CPUID/exception checks. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2845
2846
2847/**
2848 * @opdone
2849 * @opmnemonic udf30f14
2850 * @opcode 0x14
2851 * @oppfx 0xf3
2852 * @opunused intel-modrm
2853 * @opcpuid sse
2854 * @optest ->
2855 * @opdone
2856 */
2857
2858/**
2859 * @opmnemonic udf20f14
2860 * @opcode 0x14
2861 * @oppfx 0xf2
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Decoding and operand handling are shared with the other high-to-full SSE unpack forms. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2874
2875
2876/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* SSE2 variant: the common worker performs the SSE2 CPUID/exception checks. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2882
2883
2884/* Opcode 0xf3 0x0f 0x15 - invalid */
2885/* Opcode 0xf2 0x0f 0x15 - invalid */
2886
2887/**
2888 * @opdone
2889 * @opmnemonic udf30f15
2890 * @opcode 0x15
2891 * @oppfx 0xf3
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/**
2899 * @opmnemonic udf20f15
2900 * @opcode 0x15
2901 * @oppfx 0xf2
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* movlhps: low qword of source -> high qword of destination; dest low qword preserved. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* movhps: qword from memory -> high qword of destination; dest low qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2970
2971
2972/**
2973 * @opcode 0x16
2974 * @opcodesub !11 mr/reg
2975 * @oppfx 0x66
2976 * @opcpuid sse2
2977 * @opgroup og_sse2_pcksclr_datamove
2978 * @opxcpttype 5
2979 * @optest op1=1 op2=2 -> op1=2
2980 * @optest op1=0 op2=-42 -> op1=-42
2981 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Qword from memory -> high qword of destination; dest low qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* Register form of 66 0F 16 is undefined -> #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
3017
3018
3019/**
3020 * @opcode 0x16
3021 * @oppfx 0xf3
3022 * @opcpuid sse3
3023 * @opgroup og_sse3_pcksclr_datamove
3024 * @opxcpttype 4
3025 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3026 * op1=0x00000002000000020000000100000001
3027 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Duplication of the odd (high) dwords is done by the assembly worker. */
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Aligned fetch: 16-byte alignment is enforced for this memory form (#GP on misalignment). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3077
3078/**
3079 * @opdone
3080 * @opmnemonic udf30f16
3081 * @opcode 0x16
3082 * @oppfx 0xf2
3083 * @opunused intel-modrm
3084 * @opcpuid sse
3085 * @optest ->
3086 * @opdone
3087 */
3088
3089
3090/**
3091 * @opcode 0x17
3092 * @opcodesub !11 mr/reg
3093 * @oppfx none
3094 * @opcpuid sse
3095 * @opgroup og_sse_simdfp_datamove
3096 * @opxcpttype 5
3097 * @optest op1=1 op2=2 -> op1=2
3098 * @optest op1=0 op2=-42 -> op1=-42
3099 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Store the high qword of the XMM register to memory. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* Register form of 0F 17 is undefined -> #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
3136
3137
3138/**
3139 * @opcode 0x17
3140 * @opcodesub !11 mr/reg
3141 * @oppfx 0x66
3142 * @opcpuid sse2
3143 * @opgroup og_sse2_pcksclr_datamove
3144 * @opxcpttype 5
3145 * @optest op1=1 op2=2 -> op1=2
3146 * @optest op1=0 op2=-42 -> op1=-42
3147 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    /* movhpd m64, xmm: store the high quadword of the XMM register (ModRM.reg) to memory.
       Only the memory form is valid; mod=11 is #UD (see ud660f17m3 below). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Fetch bits 127:64 of the source register and store them as a quadword. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
3184
3185
3186/**
3187 * @opdone
3188 * @opmnemonic udf30f17
3189 * @opcode 0x17
3190 * @oppfx 0xf3
3191 * @opunused intel-modrm
3192 * @opcpuid sse
3193 * @optest ->
3194 * @opdone
3195 */
3196
3197/**
3198 * @opmnemonic udf20f17
3199 * @opcode 0x17
3200 * @oppfx 0xf2
3201 * @opunused intel-modrm
3202 * @opcpuid sse
3203 * @optest ->
3204 * @opdone
3205 */
3206
3207
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* Group 16: prefetch hints, selected by the ModRM reg field.
       Only the memory forms are defined; register forms are #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP: the effective address is decoded but no memory is touched. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
3240
3241
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* Multi-byte NOP (hint-nop space).  The ModRM byte is consumed and, for the
       memory form, the effective address is decoded, but nothing is accessed. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3267
3268
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit everywhere else. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; the rest are #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3300
3301
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* Read a debug register into a general register.  REX.R would select
       DR8..DR15, which do not exist, so it yields #UD. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3315
3316
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit everywhere else. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; the rest are #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3348
3349
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* Write a general register into a debug register.  REX.R would select
       DR8..DR15, which do not exist, so it yields #UD. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3363
3364
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Read a test register (386/486 only).  Pentium and later removed the
       test registers, so this raises #UD on those targets. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3378
3379
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Write a test register (386/486 only).  Pentium and later removed the
       test registers, so this raises #UD on those targets. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3393
3394
3395/**
3396 * @opcode 0x28
3397 * @oppfx none
3398 * @opcpuid sse
3399 * @opgroup og_sse_simdfp_datamove
3400 * @opxcpttype 1
3401 * @optest op1=1 op2=2 -> op1=2
3402 * @optest op1=0 op2=-42 -> op1=-42
3403 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* movaps xmm, xmm/m128: aligned 128-bit load into ModRM.reg. */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Alignment-checked fetch: movaps faults on unaligned operands. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3444
3445/**
3446 * @opcode 0x28
3447 * @oppfx 66
3448 * @opcpuid sse2
3449 * @opgroup og_sse2_pcksclr_datamove
3450 * @opxcpttype 1
3451 * @optest op1=1 op2=2 -> op1=2
3452 * @optest op1=0 op2=-42 -> op1=-42
3453 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* movapd xmm, xmm/m128: aligned 128-bit load into ModRM.reg (SSE2). */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Alignment-checked fetch: movapd faults on unaligned operands. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3494
3495/* Opcode 0xf3 0x0f 0x28 - invalid */
3496/* Opcode 0xf2 0x0f 0x28 - invalid */
3497
3498/**
3499 * @opcode 0x29
3500 * @oppfx none
3501 * @opcpuid sse
3502 * @opgroup og_sse_simdfp_datamove
3503 * @opxcpttype 1
3504 * @optest op1=1 op2=2 -> op1=2
3505 * @optest op1=0 op2=-42 -> op1=-42
3506 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* movaps xmm/m128, xmm: aligned 128-bit store from ModRM.reg. */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Alignment-checked store: movaps faults on unaligned operands. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3547
3548/**
3549 * @opcode 0x29
3550 * @oppfx 66
3551 * @opcpuid sse2
3552 * @opgroup og_sse2_pcksclr_datamove
3553 * @opxcpttype 1
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* movapd xmm/m128, xmm: aligned 128-bit store from ModRM.reg (SSE2). */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Alignment-checked store: movapd faults on unaligned operands. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3604{
3605 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3607 if (IEM_IS_MODRM_REG_MODE(bRm))
3608 {
3609 /*
3610 * Register, register.
3611 */
3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3613
3614 IEM_MC_BEGIN(3, 1);
3615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3616 IEM_MC_LOCAL(X86XMMREG, Dst);
3617 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3618 IEM_MC_ARG(uint64_t, u64Src, 2);
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_PREPARE_FPU_USAGE();
3621 IEM_MC_FPU_TO_MMX_MODE();
3622
3623 IEM_MC_REF_MXCSR(pfMxcsr);
3624 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3625 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3626
3627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3628 IEM_MC_IF_MXCSR_XCPT_PENDING()
3629 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3630 IEM_MC_ELSE()
3631 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_RM(pVCpu, bRm), Dst);
3632 IEM_MC_ENDIF();
3633
3634 IEM_MC_ADVANCE_RIP();
3635 IEM_MC_END();
3636 }
3637 else
3638 {
3639 /*
3640 * Register, memory.
3641 */
3642 IEM_MC_BEGIN(3, 3);
3643 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3644 IEM_MC_LOCAL(X86XMMREG, Dst);
3645 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3646 IEM_MC_ARG(uint64_t, u64Src, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648
3649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3652 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3653
3654 IEM_MC_PREPARE_FPU_USAGE();
3655 IEM_MC_FPU_TO_MMX_MODE();
3656 IEM_MC_REF_MXCSR(pfMxcsr);
3657
3658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3659 IEM_MC_IF_MXCSR_XCPT_PENDING()
3660 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3661 IEM_MC_ELSE()
3662 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_RM(pVCpu, bRm), Dst);
3663 IEM_MC_ENDIF();
3664
3665 IEM_MC_ADVANCE_RIP();
3666 IEM_MC_END();
3667 }
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3673FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3674{
3675 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3677 if (IEM_IS_MODRM_REG_MODE(bRm))
3678 {
3679 /*
3680 * Register, register.
3681 */
3682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3683
3684 IEM_MC_BEGIN(3, 1);
3685 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3686 IEM_MC_LOCAL(X86XMMREG, Dst);
3687 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3688 IEM_MC_ARG(uint64_t, u64Src, 2);
3689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3690 IEM_MC_PREPARE_FPU_USAGE();
3691 IEM_MC_FPU_TO_MMX_MODE();
3692
3693 IEM_MC_REF_MXCSR(pfMxcsr);
3694 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3695
3696 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3697 IEM_MC_IF_MXCSR_XCPT_PENDING()
3698 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3699 IEM_MC_ELSE()
3700 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_RM(pVCpu, bRm), Dst);
3701 IEM_MC_ENDIF();
3702
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(3, 3);
3712 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3713 IEM_MC_LOCAL(X86XMMREG, Dst);
3714 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3715 IEM_MC_ARG(uint64_t, u64Src, 2);
3716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3717
3718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722
3723 /* Doesn't cause a transition to MMX mode. */
3724 IEM_MC_PREPARE_SSE_USAGE();
3725 IEM_MC_REF_MXCSR(pfMxcsr);
3726
3727 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3728 IEM_MC_IF_MXCSR_XCPT_PENDING()
3729 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3730 IEM_MC_ELSE()
3731 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_RM(pVCpu, bRm), Dst);
3732 IEM_MC_ENDIF();
3733
3734 IEM_MC_ADVANCE_RIP();
3735 IEM_MC_END();
3736 }
3737 return VINF_SUCCESS;
3738}
3739
3740
3741/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
3742FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3743{
3744 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3745
3746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3748 {
3749 if (IEM_IS_MODRM_REG_MODE(bRm))
3750 {
3751 /* XMM, greg64 */
3752 IEM_MC_BEGIN(3, 4);
3753 IEM_MC_LOCAL(uint32_t, fMxcsr);
3754 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3756 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3757 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3758
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3762
3763 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3765 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3766 IEM_MC_IF_MXCSR_XCPT_PENDING()
3767 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3768 IEM_MC_ELSE()
3769 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3770 IEM_MC_ENDIF();
3771
3772 IEM_MC_ADVANCE_RIP();
3773 IEM_MC_END();
3774 }
3775 else
3776 {
3777 /* XMM, [mem64] */
3778 IEM_MC_BEGIN(3, 4);
3779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3780 IEM_MC_LOCAL(uint32_t, fMxcsr);
3781 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3782 IEM_MC_LOCAL(int64_t, i64Src);
3783 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3784 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3785 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3786
3787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3791
3792 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3795 IEM_MC_IF_MXCSR_XCPT_PENDING()
3796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3797 IEM_MC_ELSE()
3798 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3799 IEM_MC_ENDIF();
3800
3801 IEM_MC_ADVANCE_RIP();
3802 IEM_MC_END();
3803 }
3804 }
3805 else
3806 {
3807 if (IEM_IS_MODRM_REG_MODE(bRm))
3808 {
3809 /* greg, XMM */
3810 IEM_MC_BEGIN(3, 4);
3811 IEM_MC_LOCAL(uint32_t, fMxcsr);
3812 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3814 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3815 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3816
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3820
3821 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3824 IEM_MC_IF_MXCSR_XCPT_PENDING()
3825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3826 IEM_MC_ELSE()
3827 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3828 IEM_MC_ENDIF();
3829
3830 IEM_MC_ADVANCE_RIP();
3831 IEM_MC_END();
3832 }
3833 else
3834 {
3835 /* greg, [mem] */
3836 IEM_MC_BEGIN(3, 4);
3837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3838 IEM_MC_LOCAL(uint32_t, fMxcsr);
3839 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3840 IEM_MC_LOCAL(int32_t, i32Src);
3841 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3842 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3843 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3844
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3849
3850 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3853 IEM_MC_IF_MXCSR_XCPT_PENDING()
3854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3855 IEM_MC_ELSE()
3856 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3857 IEM_MC_ENDIF();
3858
3859 IEM_MC_ADVANCE_RIP();
3860 IEM_MC_END();
3861 }
3862 }
3863 return VINF_SUCCESS;
3864}
3865
3866
3867/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
3868FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3869{
3870 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3871
3872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3873 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3874 {
3875 if (IEM_IS_MODRM_REG_MODE(bRm))
3876 {
3877 /* XMM, greg64 */
3878 IEM_MC_BEGIN(3, 4);
3879 IEM_MC_LOCAL(uint32_t, fMxcsr);
3880 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3881 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3882 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3883 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3884
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3887 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3888
3889 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3890 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3891 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3892 IEM_MC_IF_MXCSR_XCPT_PENDING()
3893 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3894 IEM_MC_ELSE()
3895 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3896 IEM_MC_ENDIF();
3897
3898 IEM_MC_ADVANCE_RIP();
3899 IEM_MC_END();
3900 }
3901 else
3902 {
3903 /* XMM, [mem64] */
3904 IEM_MC_BEGIN(3, 4);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906 IEM_MC_LOCAL(uint32_t, fMxcsr);
3907 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3908 IEM_MC_LOCAL(int64_t, i64Src);
3909 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3910 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3911 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3912
3913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3916 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3917
3918 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3919 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3920 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3921 IEM_MC_IF_MXCSR_XCPT_PENDING()
3922 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3923 IEM_MC_ELSE()
3924 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3925 IEM_MC_ENDIF();
3926
3927 IEM_MC_ADVANCE_RIP();
3928 IEM_MC_END();
3929 }
3930 }
3931 else
3932 {
3933 if (IEM_IS_MODRM_REG_MODE(bRm))
3934 {
3935 /* greg, XMM */
3936 IEM_MC_BEGIN(3, 4);
3937 IEM_MC_LOCAL(uint32_t, fMxcsr);
3938 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3939 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3940 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3941 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3942
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3945 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3946
3947 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3949 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3950 IEM_MC_IF_MXCSR_XCPT_PENDING()
3951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3952 IEM_MC_ELSE()
3953 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3954 IEM_MC_ENDIF();
3955
3956 IEM_MC_ADVANCE_RIP();
3957 IEM_MC_END();
3958 }
3959 else
3960 {
3961 /* greg, [mem] */
3962 IEM_MC_BEGIN(3, 4);
3963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3964 IEM_MC_LOCAL(uint32_t, fMxcsr);
3965 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3966 IEM_MC_LOCAL(int32_t, i32Src);
3967 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3968 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3969 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3970
3971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3974 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3975
3976 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3977 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3978 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3979 IEM_MC_IF_MXCSR_XCPT_PENDING()
3980 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3981 IEM_MC_ELSE()
3982 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3983 IEM_MC_ENDIF();
3984
3985 IEM_MC_ADVANCE_RIP();
3986 IEM_MC_END();
3987 }
3988 }
3989 return VINF_SUCCESS;
3990}
3991
3992
3993/**
3994 * @opcode 0x2b
3995 * @opcodesub !11 mr/reg
3996 * @oppfx none
3997 * @opcpuid sse
3998 * @opgroup og_sse1_cachect
3999 * @opxcpttype 1
4000 * @optest op1=1 op2=2 -> op1=2
4001 * @optest op1=0 op2=-42 -> op1=-42
4002 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    /* movntps m128, xmm: non-temporal-hint aligned 128-bit store from ModRM.reg.
       Only the memory form exists; mod=11 is #UD. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* The non-temporal hint is not modelled; this is a plain aligned store. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
4032
4033/**
4034 * @opcode 0x2b
4035 * @opcodesub !11 mr/reg
4036 * @oppfx 0x66
4037 * @opcpuid sse2
4038 * @opgroup og_sse2_cachect
4039 * @opxcpttype 1
4040 * @optest op1=1 op2=2 -> op1=2
4041 * @optest op1=0 op2=-42 -> op1=-42
4042 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    /* movntpd m128, xmm: non-temporal-hint aligned 128-bit store from ModRM.reg (SSE2).
       Only the memory form exists; mod=11 is #UD. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* The non-temporal hint is not modelled; this is a plain aligned store. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
4072/* Opcode 0xf3 0x0f 0x2b - invalid */
4073/* Opcode 0xf2 0x0f 0x2b - invalid */
4074
4075
4076/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4077FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4078{
4079 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4081 if (IEM_IS_MODRM_REG_MODE(bRm))
4082 {
4083 /*
4084 * Register, register.
4085 */
4086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4087
4088 IEM_MC_BEGIN(3, 1);
4089 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4090 IEM_MC_LOCAL(uint64_t, u64Dst);
4091 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4092 IEM_MC_ARG(uint64_t, u64Src, 2);
4093 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4094 IEM_MC_PREPARE_FPU_USAGE();
4095 IEM_MC_FPU_TO_MMX_MODE();
4096
4097 IEM_MC_REF_MXCSR(pfMxcsr);
4098 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4099
4100 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4101 IEM_MC_IF_MXCSR_XCPT_PENDING()
4102 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4103 IEM_MC_ELSE()
4104 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
4105 IEM_MC_ENDIF();
4106
4107 IEM_MC_ADVANCE_RIP();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112 /*
4113 * Register, memory.
4114 */
4115 IEM_MC_BEGIN(3, 2);
4116 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4117 IEM_MC_LOCAL(uint64_t, u64Dst);
4118 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4119 IEM_MC_ARG(uint64_t, u64Src, 2);
4120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4121
4122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4124 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4125 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4126
4127 IEM_MC_PREPARE_FPU_USAGE();
4128 IEM_MC_FPU_TO_MMX_MODE();
4129 IEM_MC_REF_MXCSR(pfMxcsr);
4130
4131 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4132 IEM_MC_IF_MXCSR_XCPT_PENDING()
4133 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4134 IEM_MC_ELSE()
4135 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
4136 IEM_MC_ENDIF();
4137
4138 IEM_MC_ADVANCE_RIP();
4139 IEM_MC_END();
4140 }
4141 return VINF_SUCCESS;
4142}
4143
4144
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    /* Convert two packed doubles (XMM / m128, ModRM.rm) to two packed int32s
       with truncation, into the MMX register encoded in ModRM.reg. */
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Writing an MMX register puts the FPU into MMX mode (TOS=0, tags valid). */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Full 128-bit, alignment-checked source fetch. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4214
4215
4216/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4217FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4218{
4219 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4220
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4223 {
4224 if (IEM_IS_MODRM_REG_MODE(bRm))
4225 {
4226 /* greg64, XMM */
4227 IEM_MC_BEGIN(3, 4);
4228 IEM_MC_LOCAL(uint32_t, fMxcsr);
4229 IEM_MC_LOCAL(int64_t, i64Dst);
4230 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4231 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4232 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4233
4234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4235 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4236 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4237
4238 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4239 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4240 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4241 IEM_MC_IF_MXCSR_XCPT_PENDING()
4242 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4243 IEM_MC_ELSE()
4244 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4245 IEM_MC_ENDIF();
4246
4247 IEM_MC_ADVANCE_RIP();
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252 /* greg64, [mem64] */
4253 IEM_MC_BEGIN(3, 4);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4255 IEM_MC_LOCAL(uint32_t, fMxcsr);
4256 IEM_MC_LOCAL(int64_t, i64Dst);
4257 IEM_MC_LOCAL(uint32_t, u32Src);
4258 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4259 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4260 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4261
4262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4265 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4266
4267 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4268 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4269 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4270 IEM_MC_IF_MXCSR_XCPT_PENDING()
4271 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4272 IEM_MC_ELSE()
4273 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4274 IEM_MC_ENDIF();
4275
4276 IEM_MC_ADVANCE_RIP();
4277 IEM_MC_END();
4278 }
4279 }
4280 else
4281 {
4282 if (IEM_IS_MODRM_REG_MODE(bRm))
4283 {
4284 /* greg, XMM */
4285 IEM_MC_BEGIN(3, 4);
4286 IEM_MC_LOCAL(uint32_t, fMxcsr);
4287 IEM_MC_LOCAL(int32_t, i32Dst);
4288 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4289 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4290 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4291
4292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4293 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4294 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4295
4296 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4297 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4298 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4299 IEM_MC_IF_MXCSR_XCPT_PENDING()
4300 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4301 IEM_MC_ELSE()
4302 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4303 IEM_MC_ENDIF();
4304
4305 IEM_MC_ADVANCE_RIP();
4306 IEM_MC_END();
4307 }
4308 else
4309 {
4310 /* greg, [mem] */
4311 IEM_MC_BEGIN(3, 4);
4312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4313 IEM_MC_LOCAL(uint32_t, fMxcsr);
4314 IEM_MC_LOCAL(int32_t, i32Dst);
4315 IEM_MC_LOCAL(uint32_t, u32Src);
4316 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4317 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4318 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4319
4320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4323 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4324
4325 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4326 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4327 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4328 IEM_MC_IF_MXCSR_XCPT_PENDING()
4329 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4330 IEM_MC_ELSE()
4331 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4332 IEM_MC_ENDIF();
4333
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    /* Converts (with truncation) a scalar double from an XMM register or a
       64-bit memory operand into a signed 32/64-bit GPR; the destination
       width is selected by REX.W. */
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* cvttsd2si is an SSE2 instruction. */
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the GPR result when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4466
4467
4468/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4469FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4470{
4471 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4473 if (IEM_IS_MODRM_REG_MODE(bRm))
4474 {
4475 /*
4476 * Register, register.
4477 */
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479
4480 IEM_MC_BEGIN(3, 1);
4481 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4482 IEM_MC_LOCAL(uint64_t, u64Dst);
4483 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4484 IEM_MC_ARG(uint64_t, u64Src, 2);
4485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4486 IEM_MC_PREPARE_FPU_USAGE();
4487 IEM_MC_FPU_TO_MMX_MODE();
4488
4489 IEM_MC_REF_MXCSR(pfMxcsr);
4490 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4491
4492 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4493 IEM_MC_IF_MXCSR_XCPT_PENDING()
4494 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4495 IEM_MC_ELSE()
4496 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
4497 IEM_MC_ENDIF();
4498
4499 IEM_MC_ADVANCE_RIP();
4500 IEM_MC_END();
4501 }
4502 else
4503 {
4504 /*
4505 * Register, memory.
4506 */
4507 IEM_MC_BEGIN(3, 2);
4508 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4509 IEM_MC_LOCAL(uint64_t, u64Dst);
4510 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4511 IEM_MC_ARG(uint64_t, u64Src, 2);
4512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4513
4514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4517 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4518
4519 IEM_MC_PREPARE_FPU_USAGE();
4520 IEM_MC_FPU_TO_MMX_MODE();
4521 IEM_MC_REF_MXCSR(pfMxcsr);
4522
4523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4524 IEM_MC_IF_MXCSR_XCPT_PENDING()
4525 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4526 IEM_MC_ELSE()
4527 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
4528 IEM_MC_ENDIF();
4529
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 return VINF_SUCCESS;
4534}
4535
4536
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    /* Converts two packed doubles (XMM register or 128-bit memory operand) to
       two packed signed dwords in an MMX register, rounding per MXCSR.RC
       (contrast cvttpd2pi, which truncates). */
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* cvtpd2pi is an SSE2 instruction. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Destination is an MMX register -> enter MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        /* Only commit the MMX result when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Full 128-bit aligned fetch. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4606
4607
4608/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4609FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4610{
4611 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4612
4613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4615 {
4616 if (IEM_IS_MODRM_REG_MODE(bRm))
4617 {
4618 /* greg64, XMM */
4619 IEM_MC_BEGIN(3, 4);
4620 IEM_MC_LOCAL(uint32_t, fMxcsr);
4621 IEM_MC_LOCAL(int64_t, i64Dst);
4622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4623 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4624 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4625
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4628 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4629
4630 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4631 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4632 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4633 IEM_MC_IF_MXCSR_XCPT_PENDING()
4634 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4635 IEM_MC_ELSE()
4636 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4637 IEM_MC_ENDIF();
4638
4639 IEM_MC_ADVANCE_RIP();
4640 IEM_MC_END();
4641 }
4642 else
4643 {
4644 /* greg64, [mem64] */
4645 IEM_MC_BEGIN(3, 4);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647 IEM_MC_LOCAL(uint32_t, fMxcsr);
4648 IEM_MC_LOCAL(int64_t, i64Dst);
4649 IEM_MC_LOCAL(uint32_t, u32Src);
4650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4651 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4652 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4653
4654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4657 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4658
4659 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4660 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4661 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4662 IEM_MC_IF_MXCSR_XCPT_PENDING()
4663 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4664 IEM_MC_ELSE()
4665 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4666 IEM_MC_ENDIF();
4667
4668 IEM_MC_ADVANCE_RIP();
4669 IEM_MC_END();
4670 }
4671 }
4672 else
4673 {
4674 if (IEM_IS_MODRM_REG_MODE(bRm))
4675 {
4676 /* greg, XMM */
4677 IEM_MC_BEGIN(3, 4);
4678 IEM_MC_LOCAL(uint32_t, fMxcsr);
4679 IEM_MC_LOCAL(int32_t, i32Dst);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4681 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4682 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4683
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4686 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4687
4688 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4689 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4690 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4691 IEM_MC_IF_MXCSR_XCPT_PENDING()
4692 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4693 IEM_MC_ELSE()
4694 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4695 IEM_MC_ENDIF();
4696
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 else
4701 {
4702 /* greg, [mem] */
4703 IEM_MC_BEGIN(3, 4);
4704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4705 IEM_MC_LOCAL(uint32_t, fMxcsr);
4706 IEM_MC_LOCAL(int32_t, i32Dst);
4707 IEM_MC_LOCAL(uint32_t, u32Src);
4708 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4709 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4710 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4711
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4716
4717 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4718 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4719 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4720 IEM_MC_IF_MXCSR_XCPT_PENDING()
4721 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4722 IEM_MC_ELSE()
4723 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4724 IEM_MC_ENDIF();
4725
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 }
4730 return VINF_SUCCESS;
4731}
4732
4733
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    /* Converts a scalar double from an XMM register or a 64-bit memory
       operand into a signed 32/64-bit GPR (width per REX.W), rounding per
       MXCSR.RC (contrast cvttsd2si, which truncates). */
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* cvtsd2si is an SSE2 instruction. */
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the GPR result when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4858
4859
4860/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4861FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4862{
4863 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4865 if (IEM_IS_MODRM_REG_MODE(bRm))
4866 {
4867 /*
4868 * Register, register.
4869 */
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_BEGIN(4, 1);
4872 IEM_MC_LOCAL(uint32_t, fEFlags);
4873 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4874 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4875 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4876 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4877 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4878 IEM_MC_PREPARE_SSE_USAGE();
4879 IEM_MC_FETCH_EFLAGS(fEFlags);
4880 IEM_MC_REF_MXCSR(pfMxcsr);
4881 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4882 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4883 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4884 IEM_MC_IF_MXCSR_XCPT_PENDING()
4885 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4886 IEM_MC_ELSE()
4887 IEM_MC_COMMIT_EFLAGS(fEFlags);
4888 IEM_MC_ENDIF();
4889
4890 IEM_MC_ADVANCE_RIP();
4891 IEM_MC_END();
4892 }
4893 else
4894 {
4895 /*
4896 * Register, memory.
4897 */
4898 IEM_MC_BEGIN(4, 3);
4899 IEM_MC_LOCAL(uint32_t, fEFlags);
4900 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4901 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4902 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4903 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4904 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4906
4907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4910 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4911
4912 IEM_MC_PREPARE_SSE_USAGE();
4913 IEM_MC_REF_MXCSR(pfMxcsr);
4914 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4915 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4916 IEM_MC_IF_MXCSR_XCPT_PENDING()
4917 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4918 IEM_MC_ELSE()
4919 IEM_MC_COMMIT_EFLAGS(fEFlags);
4920 IEM_MC_ENDIF();
4921
4922 IEM_MC_ADVANCE_RIP();
4923 IEM_MC_END();
4924 }
4925 return VINF_SUCCESS;
4926}
4927
4928
4929/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4930FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4931{
4932 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4934 if (IEM_IS_MODRM_REG_MODE(bRm))
4935 {
4936 /*
4937 * Register, register.
4938 */
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940 IEM_MC_BEGIN(4, 1);
4941 IEM_MC_LOCAL(uint32_t, fEFlags);
4942 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4943 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4944 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4945 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4947 IEM_MC_PREPARE_SSE_USAGE();
4948 IEM_MC_FETCH_EFLAGS(fEFlags);
4949 IEM_MC_REF_MXCSR(pfMxcsr);
4950 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4951 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4952 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4953 IEM_MC_IF_MXCSR_XCPT_PENDING()
4954 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4955 IEM_MC_ELSE()
4956 IEM_MC_COMMIT_EFLAGS(fEFlags);
4957 IEM_MC_ENDIF();
4958
4959 IEM_MC_ADVANCE_RIP();
4960 IEM_MC_END();
4961 }
4962 else
4963 {
4964 /*
4965 * Register, memory.
4966 */
4967 IEM_MC_BEGIN(4, 3);
4968 IEM_MC_LOCAL(uint32_t, fEFlags);
4969 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4970 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4971 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4972 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4973 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4975
4976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4978 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4979 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4980
4981 IEM_MC_PREPARE_SSE_USAGE();
4982 IEM_MC_REF_MXCSR(pfMxcsr);
4983 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4984 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4985 IEM_MC_IF_MXCSR_XCPT_PENDING()
4986 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4987 IEM_MC_ELSE()
4988 IEM_MC_COMMIT_EFLAGS(fEFlags);
4989 IEM_MC_ENDIF();
4990
4991 IEM_MC_ADVANCE_RIP();
4992 IEM_MC_END();
4993 }
4994 return VINF_SUCCESS;
4995}
4996
4997
4998/* Opcode 0xf3 0x0f 0x2e - invalid */
4999/* Opcode 0xf2 0x0f 0x2e - invalid */
5000
5001
5002/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5003FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5004{
5005 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5007 if (IEM_IS_MODRM_REG_MODE(bRm))
5008 {
5009 /*
5010 * Register, register.
5011 */
5012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5013 IEM_MC_BEGIN(4, 1);
5014 IEM_MC_LOCAL(uint32_t, fEFlags);
5015 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5016 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5017 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5018 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5020 IEM_MC_PREPARE_SSE_USAGE();
5021 IEM_MC_FETCH_EFLAGS(fEFlags);
5022 IEM_MC_REF_MXCSR(pfMxcsr);
5023 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5024 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5025 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5026 IEM_MC_IF_MXCSR_XCPT_PENDING()
5027 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5028 IEM_MC_ELSE()
5029 IEM_MC_COMMIT_EFLAGS(fEFlags);
5030 IEM_MC_ENDIF();
5031
5032 IEM_MC_ADVANCE_RIP();
5033 IEM_MC_END();
5034 }
5035 else
5036 {
5037 /*
5038 * Register, memory.
5039 */
5040 IEM_MC_BEGIN(4, 3);
5041 IEM_MC_LOCAL(uint32_t, fEFlags);
5042 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5043 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5044 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5045 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5046 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5048
5049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5051 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5052 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5053
5054 IEM_MC_PREPARE_SSE_USAGE();
5055 IEM_MC_REF_MXCSR(pfMxcsr);
5056 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5057 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5058 IEM_MC_IF_MXCSR_XCPT_PENDING()
5059 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5060 IEM_MC_ELSE()
5061 IEM_MC_COMMIT_EFLAGS(fEFlags);
5062 IEM_MC_ENDIF();
5063
5064 IEM_MC_ADVANCE_RIP();
5065 IEM_MC_END();
5066 }
5067 return VINF_SUCCESS;
5068}
5069
5070
5071/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5072FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5073{
5074 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 if (IEM_IS_MODRM_REG_MODE(bRm))
5077 {
5078 /*
5079 * Register, register.
5080 */
5081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5082 IEM_MC_BEGIN(4, 1);
5083 IEM_MC_LOCAL(uint32_t, fEFlags);
5084 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5085 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5086 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5087 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5088 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5089 IEM_MC_PREPARE_SSE_USAGE();
5090 IEM_MC_FETCH_EFLAGS(fEFlags);
5091 IEM_MC_REF_MXCSR(pfMxcsr);
5092 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5093 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5094 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5095 IEM_MC_IF_MXCSR_XCPT_PENDING()
5096 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5097 IEM_MC_ELSE()
5098 IEM_MC_COMMIT_EFLAGS(fEFlags);
5099 IEM_MC_ENDIF();
5100
5101 IEM_MC_ADVANCE_RIP();
5102 IEM_MC_END();
5103 }
5104 else
5105 {
5106 /*
5107 * Register, memory.
5108 */
5109 IEM_MC_BEGIN(4, 3);
5110 IEM_MC_LOCAL(uint32_t, fEFlags);
5111 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5112 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5113 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5114 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5115 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5117
5118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5121 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5122
5123 IEM_MC_PREPARE_SSE_USAGE();
5124 IEM_MC_REF_MXCSR(pfMxcsr);
5125 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5126 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5127 IEM_MC_IF_MXCSR_XCPT_PENDING()
5128 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5129 IEM_MC_ELSE()
5130 IEM_MC_COMMIT_EFLAGS(fEFlags);
5131 IEM_MC_ENDIF();
5132
5133 IEM_MC_ADVANCE_RIP();
5134 IEM_MC_END();
5135 }
5136 return VINF_SUCCESS;
5137}
5138
5139
5140/* Opcode 0xf3 0x0f 0x2f - invalid */
5141/* Opcode 0xf2 0x0f 0x2f - invalid */
5142
/** Opcode 0x0f 0x30 - wrmsr.
 * Writes the MSR indexed by ECX from EDX:EAX; fully deferred to the
 * C implementation (privilege and MSR-index checks happen there). */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
5150
5151
/** Opcode 0x0f 0x31 - rdtsc.
 * Reads the time-stamp counter into EDX:EAX; deferred to the C
 * implementation (CR4.TSD handling done there). */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
5159
5160
/** Opcode 0x0f 0x32 - rdmsr.
 * (Comment previously said 0x0f 0x33; RDMSR is 0F 32, RDPMC is 0F 33.)
 * Reads the MSR indexed by ECX into EDX:EAX; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
5168
5169
/** Opcode 0x0f 0x33 - rdpmc.
 * (Comment previously said 0x0f 0x34; RDPMC is 0F 33, SYSENTER is 0F 34.)
 * Reads the performance counter selected by ECX into EDX:EAX; deferred to
 * the C implementation. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
5177
5178
/** Opcode 0x0f 0x34 - sysenter.
 * Fast system-call entry; unconditionally transfers control, hence the
 * control-flow disassembler hints.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}
5186
/** Opcode 0x0f 0x35 - sysexit.
 * Fast system-call return; the effective operand size is passed on because
 * it selects the 32-/64-bit return mode in the C implementation. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
5194
/** Opcode 0x0f 0x37 - getsec (SMX). Not implemented yet, stubbed. */
FNIEMOP_STUB(iemOp_getsec);
5197
5198
/** Opcode 0x0f 0x38 - three-byte escape.
 * Fetches the third opcode byte and dispatches via the 0f38 table, which is
 * indexed by opcode byte times four plus the SIMD-prefix index (none/66/F3/F2).
 * Stubbed out when the table isn't compiled in. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5210
5211
/** Opcode 0x0f 0x3a - three-byte escape.
 * Fetches the third opcode byte and dispatches via the 0f3a table, which is
 * indexed by opcode byte times four plus the SIMD-prefix index (none/66/F3/F2).
 * Stubbed out when the table isn't compiled in. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5223
5224
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes on the emitted microcode:
 *  - The memory form always reads the memory operand, even when the
 *    condition is false (the fetch is outside a_Cnd) - this matches real
 *    CMOVcc behavior where the source access and its faults happen
 *    unconditionally.
 *  - In the 32-bit case the ELSE branch clears the high half of the 64-bit
 *    destination register, so the upper 32 bits are zeroed whether or not
 *    the move happens.
 *
 * @param a_Cnd The conditional "microcode" operation (an IEM_MC_IF_*).
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
5325
5326
5327
/** Opcode 0x0f 0x40 - cmovo: move if OF=1 (overflow). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF=0 (no overflow). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF=1 (carry / below). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae: move if CF=0 (no carry / above or equal). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF=1 (equal / zero). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF=0 (not equal / not zero). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe: move if CF=1 or ZF=1 (below or equal). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if CF=0 and ZF=0 (above). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs: move if SF=1 (sign). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF=0 (no sign). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp: move if PF=1 (parity even). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp: move if PF=0 (parity odd). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl: move if SF != OF (less, signed). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF == OF (greater or equal, signed). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle: move if ZF=1 or SF != OF (less or equal, signed). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF=0 and SF == OF (greater, signed). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5454
5455#undef CMOV_X
5456
/** Opcode 0x0f 0x50 - movmskps Gy, Ups
 * Extracts the sign bits of the four packed singles in the XMM source into
 * the low bits of a general register.  Register form only; a memory operand
 * raises \#UD. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 1);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        /* 32-bit store zero-extends the 4-bit mask into the destination. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* No memory operand. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
5485
5486
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd
 * Extracts the sign bits of the two packed doubles in the XMM source into
 * the low bits of a general register.  SSE2 instruction, hence the SSE2
 * exception check.  Register form only; a memory operand raises \#UD. */
FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 1);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
        /* 32-bit store zero-extends the 2-bit mask into the destination. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* No memory operand. */
    return IEMOP_RAISE_INVALID_OPCODE();

}
5516
5517
5518/* Opcode 0xf3 0x0f 0x50 - invalid */
5519/* Opcode 0xf2 0x0f 0x50 - invalid */
5520
5521
/** Opcode 0x0f 0x51 - sqrtps Vps, Wps
 * Packed single-precision square root; routed through the common SSE
 * full/full floating-point worker. */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd
 * Packed double-precision square root; SSE2 worker. */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss
 * Scalar single-precision square root; 32-bit source worker. */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd
 * Scalar double-precision square root; 64-bit source SSE2 worker. */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
5552
5553
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps (not implemented yet). */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss (not implemented yet). */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps (not implemented yet). */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss (not implemented yet). */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
5567
5568
/** Opcode 0x0f 0x54 - andps Vps, Wps
 * Bitwise AND of 128-bit operands - shares the integer pand helper since
 * the operation is bit-identical regardless of lane interpretation.
 * NOTE(review): routed through the SSE2 common worker although ANDPS is an
 * SSE1 instruction - confirm the intended CPUID/#UD check. */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd (bitwise AND, shares pand helper). */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps (bitwise AND-NOT, shares pandn helper). */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd (bitwise AND-NOT, shares pandn helper). */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps (bitwise OR, shares por helper). */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd (bitwise OR, shares por helper). */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps (bitwise XOR, shares pxor helper). */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd (bitwise XOR, shares pxor helper). */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
5643
5644
5645/* Opcode 0xf3 0x0f 0x57 - invalid */
5646/* Opcode 0xf2 0x0f 0x57 - invalid */
5647
/** Opcode 0x0f 0x58 - addps Vps, Wps (packed single add; SSE worker). */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd (packed double add; SSE2 worker). */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss (scalar single add). */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd (scalar double add). */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps (packed single multiply). */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd (packed double multiply). */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss (scalar single multiply). */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd (scalar double multiply). */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5710
5711
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps
 * Converts packed singles to packed doubles; SSE2 worker. */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd
 * Converts packed doubles to packed singles; SSE2 worker. */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss
 * Scalar single-to-double conversion.
 * NOTE(review): uses the SSE (not SSE2) worker for the exception check -
 * confirm the intended CPUID requirement. */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd
 * Scalar double-to-single conversion; SSE2 worker. */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq
 * Packed dword-integer to single conversion. */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps
 * Packed single to dword-integer conversion (rounded per MXCSR). */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps
 * Packed single to dword-integer conversion with truncation. */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}
5766
5767
5768/* Opcode 0xf2 0x0f 0x5b - invalid */
5769
5770
/** Opcode 0x0f 0x5c - subps Vps, Wps (packed single subtract). */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd (packed double subtract). */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss (scalar single subtract). */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd (scalar double subtract). */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}


/** Opcode 0x0f 0x5d - minps Vps, Wps (packed single minimum). */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd (packed double minimum). */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss (scalar single minimum). */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd (scalar double minimum). */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}


/** Opcode 0x0f 0x5e - divps Vps, Wps (packed single divide). */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd (packed double divide). */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss (scalar single divide). */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd (scalar double divide). */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}


/** Opcode 0x0f 0x5f - maxps Vps, Wps (packed single maximum). */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd (packed double maximum). */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss (scalar single maximum). */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd (scalar double maximum). */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5897
5898
5899/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5900FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5901{
5902 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5903 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5904}
5905
5906
5907/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
5908FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5909{
5910 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5911 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5912}
5913
5914
5915/* Opcode 0xf3 0x0f 0x60 - invalid */
5916
5917
5918/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5919FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5920{
5921 /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
5922 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5923 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5924}
5925
5926
5927/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5928FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5929{
5930 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5931 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5932}
5933
5934
5935/* Opcode 0xf3 0x0f 0x61 - invalid */
5936
5937
5938/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5939FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5940{
5941 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5942 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5943}
5944
5945
5946/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5947FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5948{
5949 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5950 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5951}
5952
5953
5954/* Opcode 0xf3 0x0f 0x62 - invalid */
5955
5956
5957
5958/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5959FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5960{
5961 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5962 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5963}
5964
5965
5966/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5967FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5968{
5969 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5970 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5971}
5972
5973
5974/* Opcode 0xf3 0x0f 0x63 - invalid */
5975
5976
5977/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5978FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5979{
5980 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5981 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5982}
5983
5984
5985/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5986FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5987{
5988 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5989 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5990}
5991
5992
5993/* Opcode 0xf3 0x0f 0x64 - invalid */
5994
5995
5996/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5997FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5998{
5999 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6000 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6001}
6002
6003
6004/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6005FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6006{
6007 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6008 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6009}
6010
6011
6012/* Opcode 0xf3 0x0f 0x65 - invalid */
6013
6014
6015/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6016FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6017{
6018 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6019 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6020}
6021
6022
6023/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6024FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6025{
6026 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6027 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6028}
6029
6030
6031/* Opcode 0xf3 0x0f 0x66 - invalid */
6032
6033
6034/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6035FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6036{
6037 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6038 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6039}
6040
6041
6042/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6043FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6044{
6045 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6046 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6047}
6048
6049
6050/* Opcode 0xf3 0x0f 0x67 - invalid */
6051
6052
6053/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6054 * @note Intel and AMD both uses Qd for the second parameter, however they
6055 * both list it as a mmX/mem64 operand and intel describes it as being
6056 * loaded as a qword, so it should be Qq, shouldn't it? */
6057FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6058{
6059 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6060 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6061}
6062
6063
6064/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6065FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6066{
6067 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6068 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6069}
6070
6071
6072/* Opcode 0xf3 0x0f 0x68 - invalid */
6073
6074
6075/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6076 * @note Intel and AMD both uses Qd for the second parameter, however they
6077 * both list it as a mmX/mem64 operand and intel describes it as being
6078 * loaded as a qword, so it should be Qq, shouldn't it? */
6079FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6080{
6081 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6082 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6083}
6084
6085
6086/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
6087FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6088{
6089 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6090 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6091
6092}
6093
6094
6095/* Opcode 0xf3 0x0f 0x69 - invalid */
6096
6097
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    /* Interleave high dwords of both operands; dispatch to the HighHigh worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
6107
6108
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    /* Interleave high dwords, SSE2 128-bit variant. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6115
6116
6117/* Opcode 0xf3 0x0f 0x6a - invalid */
6118
6119
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * @note Same Qd-vs-Qq operand question as for punpckhbw above applies here. */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    /* Pack dwords to words with signed saturation, MMX variant. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
6126
6127
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    /* Pack dwords to words with signed saturation, SSE2 128-bit variant. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6134
6135
6136/* Opcode 0xf3 0x0f 0x6b - invalid */
6137
6138
6139/* Opcode 0x0f 0x6c - invalid */
6140
6141
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx
 * @note SSE2 only; there is no MMX form of the qword unpacks (0x0f 0x6c is invalid). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* Interleave low qwords; dispatch to the LowLow worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6148
6149
6150/* Opcode 0xf3 0x0f 0x6c - invalid */
6151/* Opcode 0xf2 0x0f 0x6c - invalid */
6152
6153
6154/* Opcode 0x0f 0x6d - invalid */
6155
6156
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx
 * @note SSE2 only; there is no MMX form of the qword unpacks (0x0f 0x6d is invalid). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    /* Interleave high qwords; dispatch to the HighHigh worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6163
6164
6165/* Opcode 0xf3 0x0f 0x6d - invalid */
6166
6167
/**
 * Opcode 0x0f 0x6e - movd/movq Pq, Ey.
 *
 * REX.W selects the form: with REX.W=1 this is movq mm, r/m64; without it
 * movd mm, r/m32 where the 32-bit value is zero-extended into the 64-bit
 * MMX destination.  See the embedded doxygen blocks for test annotations.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64: copy the full general register into the MMX register. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE(); /* writing an MMX reg switches the FPU to MMX mode (ftw=0xff in the optests). */

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp); /* _8: only 3 reg bits, REX.R presumably ignored for MMX. */

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64]: load a qword from memory into the MMX register. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg: 32-bit register source, zero-extended to 64 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: 32-bit memory source, zero-extended on store. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6275
/**
 * Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey.
 *
 * REX.W selects the form: with REX.W=1 this is movq xmm, r/m64, otherwise
 * movd xmm, r/m32; in both cases the value is zero-extended to the full
 * 128-bit XMM destination.  See the embedded doxygen blocks for test
 * annotations.
 */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64: qword register source, zero-extended to 128 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64]: qword memory source, zero-extended to 128 bits. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32: dword register source, zero-extended to 128 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32]: dword memory source, zero-extended to 128 bits. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6379
6380/* Opcode 0xf3 0x0f 0x6e - invalid */
6381
6382
6383/**
6384 * @opcode 0x6f
6385 * @oppfx none
6386 * @opcpuid mmx
6387 * @opgroup og_mmx_datamove
6388 * @opxcpttype 5
6389 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6390 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6391 */
6392FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6393{
6394 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6396 if (IEM_IS_MODRM_REG_MODE(bRm))
6397 {
6398 /*
6399 * Register, register.
6400 */
6401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6402 IEM_MC_BEGIN(0, 1);
6403 IEM_MC_LOCAL(uint64_t, u64Tmp);
6404
6405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6406 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6407 IEM_MC_FPU_TO_MMX_MODE();
6408
6409 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6410 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6411
6412 IEM_MC_ADVANCE_RIP();
6413 IEM_MC_END();
6414 }
6415 else
6416 {
6417 /*
6418 * Register, memory.
6419 */
6420 IEM_MC_BEGIN(0, 2);
6421 IEM_MC_LOCAL(uint64_t, u64Tmp);
6422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6423
6424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6427 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6428 IEM_MC_FPU_TO_MMX_MODE();
6429
6430 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6431 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6432
6433 IEM_MC_ADVANCE_RIP();
6434 IEM_MC_END();
6435 }
6436 return VINF_SUCCESS;
6437}
6438
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    /* Aligned 128-bit load/move; memory operands must be 16-byte aligned
       (enforced via the _ALIGN_SSE fetch below). */
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: straight XMM-to-XMM copy, no alignment concern.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: alignment-checked 128-bit load.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6490
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    /* Unaligned 128-bit load/move; identical to movdqa except the memory
       fetch below performs no alignment check. */
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: straight XMM-to-XMM copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: unaligned 128-bit load.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6539
6540
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * @note Requires SSE or the AMD MMX extensions (see the
 *       _CHECK_SSE_OR_MMXEXT raise macro below), not plain MMX. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* The shuffle-control immediate follows the ModR/M byte. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate is decoded after the effective address (it trails any
           displacement bytes in the instruction stream). */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6600
6601
/**
 * Common worker for SSE2 instructions on the forms:
 *     pshufd  xmm1, xmm2/mem128, imm8
 *     pshufhw xmm1, xmm2/mem128, imm8
 *     pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnWorker   The assembly/C implementation doing the actual shuffle;
 *                      receives (pDst, pSrc, imm8).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* Shuffle-control immediate follows the ModR/M byte. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate trails the displacement bytes, so decode it after the
           effective address. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6661
6662
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* Dword shuffle of the full 128 bits; decoding shared with pshufhw/pshuflw. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6669
6670
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    /* Word shuffle of the high qword; decoding shared with pshufd/pshuflw. */
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6677
6678
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    /* Word shuffle of the low qword; decoding shared with pshufd/pshufhw. */
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6685
6686
/**
 * Common worker for MMX instructions of the form:
 *     psrlw mm, imm8
 *     psraw mm, imm8
 *     psllw mm, imm8
 *     psrld mm, imm8
 *     psrad mm, imm8
 *     pslld mm, imm8
 *     psrlq mm, imm8
 *     psllq mm, imm8
 *
 * @param   bRm     The ModR/M byte, already decoded by the caller (the group
 *                  dispatchers below only route register-form encodings here).
 * @param   pfnU64  Worker performing the actual shift on the 64-bit register.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Note: destination is the r/m field for these group encodings. */
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /* This branch is effectively dead: the group jump tables only route
           register-mode encodings here, hence nothing is done. */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}
6732
6733
/**
 * Common worker for SSE2 instructions of the form:
 *     psrlw xmm, imm8
 *     psraw xmm, imm8
 *     psllw xmm, imm8
 *     psrld xmm, imm8
 *     psrad xmm, imm8
 *     pslld xmm, imm8
 *     psrlq xmm, imm8
 *     psllq xmm, imm8
 *
 * @param   bRm      The ModR/M byte, already decoded by the caller (the group
 *                   dispatchers below only route register-form encodings here).
 * @param   pfnU128  Worker performing the actual shift on the 128-bit register.
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Note: destination is the r/m field for these group encodings. */
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /* Effectively dead: the group jump tables only route register-mode
           encodings here, hence nothing is done. */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}
6775
6776
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib (logical right shift of words, MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6783
6784
/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib (logical right shift of words, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
6791
6792
/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib (arithmetic right shift of words, MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6799
6800
/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib (arithmetic right shift of words, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
6807
6808
/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib (left shift of words, MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6815
6816
/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib (left shift of words, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6823
6824
/**
 * Group 12 jump table for register variant.
 *
 * Indexed by (ModR/M reg << 2) + idxPrefix (see iemOp_Grp12); judging by the
 * opcode comments on the entries, the four columns per /r row correspond to
 * the no-prefix, 0x66, 0xf3 and 0xf2 encodings respectively.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib,   iemOp_Grp12_psrlw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib,   iemOp_Grp12_psraw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib,   iemOp_Grp12_psllw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6840
6841
/** Opcode 0x0f 0x71 - group 12 (word shifts by immediate).
 *  Only the register form is valid; memory forms are undefined opcodes. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6852
6853
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib (logical right shift of dwords, MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6860
6861
/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib (logical right shift of dwords, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
6868
6869
/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib (arithmetic right shift of dwords, MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6876
6877
/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib (arithmetic right shift of dwords, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
6884
6885
/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib (left shift of dwords, MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6892
/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib (left shift of dwords, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6899
6900
/**
 * Group 13 jump table for register variant.
 *
 * Indexed by (ModR/M reg << 2) + idxPrefix (see iemOp_Grp13); judging by the
 * opcode comments on the entries, the four columns per /r row correspond to
 * the no-prefix, 0x66, 0xf3 and 0xf2 encodings respectively.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib,   iemOp_Grp13_psrld_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib,   iemOp_Grp13_psrad_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib,   iemOp_Grp13_pslld_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6916
/** Opcode 0x0f 0x72 - group 13 (dword shifts by immediate).
 *  Only the register form is valid; memory forms are undefined opcodes. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6927
6928
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib (logical right shift of qword, MMX). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6935
6936
/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib (logical right shift of qwords, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
6943
6944
/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib (byte-wise right shift of the
 *  whole 128 bits; SSE2 only, no MMX counterpart). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
6951
6952
/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib (left shift of qword, MMX). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6959
6960
/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib (left shift of qwords, SSE2). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
6967
6968
/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib (byte-wise left shift of the
 *  whole 128 bits; SSE2 only, no MMX counterpart). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6975
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by (ModR/M reg << 2) + idxPrefix (see iemOp_Grp14); judging by the
 * opcode comments on the entries, the four columns per /r row correspond to
 * the no-prefix, 0x66, 0xf3 and 0xf2 encodings.  The /3 and /7 rows (byte
 * shifts) are only valid with the 0x66 prefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_psrlq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_psllq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6991
6992
/** Opcode 0x0f 0x73 - group 14 (qword/dqword shifts by immediate).
 *  Only the register form is valid; memory forms are undefined opcodes. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /r and the active SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
7003
7004
7005/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7006FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7007{
7008 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7009 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7010}
7011
7012
7013/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7014FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7015{
7016 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7017 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7018}
7019
7020
7021/* Opcode 0xf3 0x0f 0x74 - invalid */
7022/* Opcode 0xf2 0x0f 0x74 - invalid */
7023
7024
7025/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7026FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7027{
7028 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7029 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7030}
7031
7032
7033/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7034FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7035{
7036 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7037 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7038}
7039
7040
7041/* Opcode 0xf3 0x0f 0x75 - invalid */
7042/* Opcode 0xf2 0x0f 0x75 - invalid */
7043
7044
7045/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7046FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7047{
7048 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7049 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7050}
7051
7052
7053/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7054FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7055{
7056 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7057 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7058}
7059
7060
7061/* Opcode 0xf3 0x0f 0x76 - invalid */
7062/* Opcode 0xf2 0x0f 0x76 - invalid */
7063
7064
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    /* Leaves MMX mode: the x87/MMX state is actualized and switched back to
       FPU mode (IEM_MC_FPU_FROM_MMX_MODE), making the register stack usable
       for x87 code again. */
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7080
7081/* Opcode 0x66 0x0f 0x77 - invalid */
7082/* Opcode 0xf3 0x0f 0x77 - invalid */
7083/* Opcode 0xf2 0x0f 0x77 - invalid */
7084
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* VMREAD/VMWRITE always operate on the full 64-bit width in 64-bit mode
       and 32 bits otherwise; the operand-size prefix is not honoured. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            /* The field encoding comes from the 'reg' operand, the value is
               written to the 'rm' register. */
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t,   u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t,   u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            /* Effective address must be calculated before the done-decoding
               check so any remaining opcode bytes are consumed first. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
7159
7160/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7161FNIEMOP_STUB(iemOp_AmdGrp17);
7162/* Opcode 0xf3 0x0f 0x78 - invalid */
7163/* Opcode 0xf2 0x0f 0x78 - invalid */
7164
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* VMREAD/VMWRITE always operate on the full 64-bit width in 64-bit mode
       and 32 bits otherwise; the operand-size prefix is not honoured. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            /* Value comes from the 'rm' register, the VMCS field encoding
               from the 'reg' register. */
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            /* Effective address must be calculated before the done-decoding
               check so any remaining opcode bytes are consumed first. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
7239/* Opcode 0x66 0x0f 0x79 - invalid */
7240/* Opcode 0xf3 0x0f 0x79 - invalid */
7241/* Opcode 0xf2 0x0f 0x79 - invalid */
7242
7243/* Opcode 0x0f 0x7a - invalid */
7244/* Opcode 0x66 0x0f 0x7a - invalid */
7245/* Opcode 0xf3 0x0f 0x7a - invalid */
7246/* Opcode 0xf2 0x0f 0x7a - invalid */
7247
7248/* Opcode 0x0f 0x7b - invalid */
7249/* Opcode 0x66 0x0f 0x7b - invalid */
7250/* Opcode 0xf3 0x0f 0x7b - invalid */
7251/* Opcode 0xf2 0x0f 0x7b - invalid */
7252
7253/* Opcode 0x0f 0x7c - invalid */
7254
7255
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    /* SSE3 horizontal add of packed doubles; dispatched through the common
       SSE3 floating-point full128,full128->full128 worker. */
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
7262
7263
7264/* Opcode 0xf3 0x0f 0x7c - invalid */
7265
7266
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    /* SSE3 horizontal add of packed singles; dispatched through the common
       SSE3 floating-point full128,full128->full128 worker. */
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
7273
7274
7275/* Opcode 0x0f 0x7d - invalid */
7276
7277
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    /* SSE3 horizontal subtract of packed doubles; dispatched through the
       common SSE3 floating-point full128,full128->full128 worker. */
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
7284
7285
7286/* Opcode 0xf3 0x0f 0x7d - invalid */
7287
7288
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    /* SSE3 horizontal subtract of packed singles; dispatched through the
       common SSE3 floating-point full128,full128->full128 worker. */
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7295
7296
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 * Stores the low 32/64 bits of an MMX register to a GPR or memory; the
 * REX.W prefix selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            /* Accessing an MMX register puts the FPU in MMX mode (FTW/TOS). */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            /* Only the low 32 bits of the MMX register are stored. */
            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
7406
7407
/* Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy.
   Stores the low 32/64 bits of an XMM register to a GPR or memory; the
   REX.W prefix selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            /* Source is only read, so no need to mark the SSE state dirty. */
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            /* Only the low 32 bits of the XMM register are stored. */
            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
7512
/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * Loads a quadword from an XMM register or memory into the low half of the
 * destination XMM register, zeroing the upper 64 bits.
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Store with zero-extension to the full 128 bits. */
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7566
7567/* Opcode 0xf2 0x0f 0x7e - invalid */
7568
7569
/** Opcode 0x0f 0x7f - movq Qq, Pq
 * Stores a full MMX register to another MMX register or to memory. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        /* Accessing an MMX register puts the FPU in MMX mode (FTW/TOS). */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, Register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7618
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 * Stores a full XMM register to another XMM register or to memory; the
 * memory form requires 16-byte alignment (#GP otherwise). */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Aligned store - raises #GP on unaligned effective addresses. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7660
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 * Unaligned variant of movdqa: stores a full XMM register to another XMM
 * register or to memory without any alignment requirement. */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Unaligned store - no alignment check, unlike movdqa. */
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7702
7703/* Opcode 0xf2 0x0f 0x7f - invalid */
7704
7705
7706
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7741
7742
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when OF is set, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7777
7778
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7813
7814
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when CF is set, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7849
7850
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if equal/zero (ZF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7885
7886
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not equal/not zero (ZF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when ZF is set, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7921
7922
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7957
7958
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when CF or ZF is set, jump otherwise. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7993
7994
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8029
8030
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when SF is set, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8065
8066
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if parity even (PF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8101
8102
/** Opcode 0x0f 0x8b - jnp/jpo Jv: jump near if parity odd (PF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when PF is set, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8137
8138
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (SF!=OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8173
8174
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (SF=OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when SF != OF, jump otherwise. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8209
8210
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF=1 or SF!=OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8245
8246
8247/** Opcode 0x0f 0x8f - jnle/jg Jv: near jump if not less-or-equal / greater (ZF clear and SF == OF). */
8248FNIEMOP_DEF(iemOp_jnle_Jv)
8249{
8250    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8251    IEMOP_HLP_MIN_386();
8252    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8253    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8254    {
8255        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8256        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8257
8258        IEM_MC_BEGIN(0, 0);
8259        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { /* tests the inverse ('le'), so fall thru here */
8260            IEM_MC_ADVANCE_RIP();
8261        } IEM_MC_ELSE() {
8262            IEM_MC_REL_JMP_S16(i16Imm);
8263        } IEM_MC_ENDIF();
8264        IEM_MC_END();
8265    }
8266    else
8267    {
8268        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8269        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8270
8271        IEM_MC_BEGIN(0, 0);
8272        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { /* inverse condition, see above */
8273            IEM_MC_ADVANCE_RIP();
8274        } IEM_MC_ELSE() {
8275            IEM_MC_REL_JMP_S32(i32Imm);
8276        } IEM_MC_ENDIF();
8277        IEM_MC_END();
8278    }
8279    return VINF_SUCCESS;
8280}
8281
8282
8283/** Opcode 0x0f 0x90 - seto Eb: set the r/m8 operand to 1 if OF is set, else 0. */
8284FNIEMOP_DEF(iemOp_seto_Eb)
8285{
8286    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8287    IEMOP_HLP_MIN_386();
8288    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8289
8290    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8291     *        any way. AMD says it's "unused", whatever that means. We're
8292     *        ignoring for now. */
8293    if (IEM_IS_MODRM_REG_MODE(bRm))
8294    {
8295        /* register target */
8296        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8297        IEM_MC_BEGIN(0, 0);
8298        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8299            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8300        } IEM_MC_ELSE() {
8301            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8302        } IEM_MC_ENDIF();
8303        IEM_MC_ADVANCE_RIP();
8304        IEM_MC_END();
8305    }
8306    else
8307    {
8308        /* memory target */
8309        IEM_MC_BEGIN(0, 1);
8310        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8311        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8312        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8313        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8314            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8315        } IEM_MC_ELSE() {
8316            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8317        } IEM_MC_ENDIF();
8318        IEM_MC_ADVANCE_RIP();
8319        IEM_MC_END();
8320    }
8321    return VINF_SUCCESS;
8322}
8323
8324
8325/** Opcode 0x0f 0x91 - setno Eb: set the r/m8 operand to 1 if OF is clear, else 0. */
8326FNIEMOP_DEF(iemOp_setno_Eb)
8327{
8328    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8329    IEMOP_HLP_MIN_386();
8330    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8331
8332    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8333     *        any way. AMD says it's "unused", whatever that means. We're
8334     *        ignoring for now. */
8335    if (IEM_IS_MODRM_REG_MODE(bRm))
8336    {
8337        /* register target */
8338        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8339        IEM_MC_BEGIN(0, 0);
8340        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { /* inverted vs. seto: 0 when set, 1 when clear */
8341            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8342        } IEM_MC_ELSE() {
8343            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8344        } IEM_MC_ENDIF();
8345        IEM_MC_ADVANCE_RIP();
8346        IEM_MC_END();
8347    }
8348    else
8349    {
8350        /* memory target */
8351        IEM_MC_BEGIN(0, 1);
8352        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8353        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8354        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8355        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8356            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8357        } IEM_MC_ELSE() {
8358            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8359        } IEM_MC_ENDIF();
8360        IEM_MC_ADVANCE_RIP();
8361        IEM_MC_END();
8362    }
8363    return VINF_SUCCESS;
8364}
8365
8366
8367/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set the r/m8 operand to 1 if CF is set, else 0. */
8368FNIEMOP_DEF(iemOp_setc_Eb)
8369{
8370    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8371    IEMOP_HLP_MIN_386();
8372    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8373
8374    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8375     *        any way. AMD says it's "unused", whatever that means. We're
8376     *        ignoring for now. */
8377    if (IEM_IS_MODRM_REG_MODE(bRm))
8378    {
8379        /* register target */
8380        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8381        IEM_MC_BEGIN(0, 0);
8382        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8383            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8384        } IEM_MC_ELSE() {
8385            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8386        } IEM_MC_ENDIF();
8387        IEM_MC_ADVANCE_RIP();
8388        IEM_MC_END();
8389    }
8390    else
8391    {
8392        /* memory target */
8393        IEM_MC_BEGIN(0, 1);
8394        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8395        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8396        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8397        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8398            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8399        } IEM_MC_ELSE() {
8400            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8401        } IEM_MC_ENDIF();
8402        IEM_MC_ADVANCE_RIP();
8403        IEM_MC_END();
8404    }
8405    return VINF_SUCCESS;
8406}
8407
8408
8409/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set the r/m8 operand to 1 if CF is clear, else 0. */
8410FNIEMOP_DEF(iemOp_setnc_Eb)
8411{
8412    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8413    IEMOP_HLP_MIN_386();
8414    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8415
8416    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8417     *        any way. AMD says it's "unused", whatever that means. We're
8418     *        ignoring for now. */
8419    if (IEM_IS_MODRM_REG_MODE(bRm))
8420    {
8421        /* register target */
8422        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8423        IEM_MC_BEGIN(0, 0);
8424        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { /* inverted vs. setc: 0 when set, 1 when clear */
8425            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8426        } IEM_MC_ELSE() {
8427            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8428        } IEM_MC_ENDIF();
8429        IEM_MC_ADVANCE_RIP();
8430        IEM_MC_END();
8431    }
8432    else
8433    {
8434        /* memory target */
8435        IEM_MC_BEGIN(0, 1);
8436        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8437        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8438        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8439        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8440            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8441        } IEM_MC_ELSE() {
8442            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8443        } IEM_MC_ENDIF();
8444        IEM_MC_ADVANCE_RIP();
8445        IEM_MC_END();
8446    }
8447    return VINF_SUCCESS;
8448}
8449
8450
8451/** Opcode 0x0f 0x94 - sete/setz Eb: set the r/m8 operand to 1 if ZF is set, else 0. */
8452FNIEMOP_DEF(iemOp_sete_Eb)
8453{
8454    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8455    IEMOP_HLP_MIN_386();
8456    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8457
8458    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8459     *        any way. AMD says it's "unused", whatever that means. We're
8460     *        ignoring for now. */
8461    if (IEM_IS_MODRM_REG_MODE(bRm))
8462    {
8463        /* register target */
8464        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8465        IEM_MC_BEGIN(0, 0);
8466        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8467            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8468        } IEM_MC_ELSE() {
8469            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8470        } IEM_MC_ENDIF();
8471        IEM_MC_ADVANCE_RIP();
8472        IEM_MC_END();
8473    }
8474    else
8475    {
8476        /* memory target */
8477        IEM_MC_BEGIN(0, 1);
8478        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8479        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8480        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8481        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8482            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8483        } IEM_MC_ELSE() {
8484            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8485        } IEM_MC_ENDIF();
8486        IEM_MC_ADVANCE_RIP();
8487        IEM_MC_END();
8488    }
8489    return VINF_SUCCESS;
8490}
8491
8492
8493/** Opcode 0x0f 0x95 - setne/setnz Eb: set the r/m8 operand to 1 if ZF is clear, else 0. */
8494FNIEMOP_DEF(iemOp_setne_Eb)
8495{
8496    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8497    IEMOP_HLP_MIN_386();
8498    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8499
8500    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8501     *        any way. AMD says it's "unused", whatever that means. We're
8502     *        ignoring for now. */
8503    if (IEM_IS_MODRM_REG_MODE(bRm))
8504    {
8505        /* register target */
8506        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8507        IEM_MC_BEGIN(0, 0);
8508        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { /* inverted vs. sete: 0 when set, 1 when clear */
8509            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8510        } IEM_MC_ELSE() {
8511            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8512        } IEM_MC_ENDIF();
8513        IEM_MC_ADVANCE_RIP();
8514        IEM_MC_END();
8515    }
8516    else
8517    {
8518        /* memory target */
8519        IEM_MC_BEGIN(0, 1);
8520        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8521        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8522        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8523        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8524            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8525        } IEM_MC_ELSE() {
8526            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8527        } IEM_MC_ENDIF();
8528        IEM_MC_ADVANCE_RIP();
8529        IEM_MC_END();
8530    }
8531    return VINF_SUCCESS;
8532}
8533
8534
8535/** Opcode 0x0f 0x96 - setbe/setna Eb: set the r/m8 operand to 1 if CF or ZF is set, else 0. */
8536FNIEMOP_DEF(iemOp_setbe_Eb)
8537{
8538    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8539    IEMOP_HLP_MIN_386();
8540    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8541
8542    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8543     *        any way. AMD says it's "unused", whatever that means. We're
8544     *        ignoring for now. */
8545    if (IEM_IS_MODRM_REG_MODE(bRm))
8546    {
8547        /* register target */
8548        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8549        IEM_MC_BEGIN(0, 0);
8550        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8551            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8552        } IEM_MC_ELSE() {
8553            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8554        } IEM_MC_ENDIF();
8555        IEM_MC_ADVANCE_RIP();
8556        IEM_MC_END();
8557    }
8558    else
8559    {
8560        /* memory target */
8561        IEM_MC_BEGIN(0, 1);
8562        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8563        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8564        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8565        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8566            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8567        } IEM_MC_ELSE() {
8568            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8569        } IEM_MC_ENDIF();
8570        IEM_MC_ADVANCE_RIP();
8571        IEM_MC_END();
8572    }
8573    return VINF_SUCCESS;
8574}
8575
8576
8577/** Opcode 0x0f 0x97 - setnbe/seta Eb: set the r/m8 operand to 1 if both CF and ZF are clear, else 0. */
8578FNIEMOP_DEF(iemOp_setnbe_Eb)
8579{
8580    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8581    IEMOP_HLP_MIN_386();
8582    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8583
8584    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8585     *        any way. AMD says it's "unused", whatever that means. We're
8586     *        ignoring for now. */
8587    if (IEM_IS_MODRM_REG_MODE(bRm))
8588    {
8589        /* register target */
8590        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8591        IEM_MC_BEGIN(0, 0);
8592        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* inverted vs. setbe: 0 when either is set */
8593            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8594        } IEM_MC_ELSE() {
8595            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8596        } IEM_MC_ENDIF();
8597        IEM_MC_ADVANCE_RIP();
8598        IEM_MC_END();
8599    }
8600    else
8601    {
8602        /* memory target */
8603        IEM_MC_BEGIN(0, 1);
8604        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8605        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8606        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8607        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8608            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8609        } IEM_MC_ELSE() {
8610            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8611        } IEM_MC_ENDIF();
8612        IEM_MC_ADVANCE_RIP();
8613        IEM_MC_END();
8614    }
8615    return VINF_SUCCESS;
8616}
8617
8618
8619/** Opcode 0x0f 0x98 - sets Eb: set the r/m8 operand to 1 if SF is set, else 0. */
8620FNIEMOP_DEF(iemOp_sets_Eb)
8621{
8622    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8623    IEMOP_HLP_MIN_386();
8624    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8625
8626    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8627     *        any way. AMD says it's "unused", whatever that means. We're
8628     *        ignoring for now. */
8629    if (IEM_IS_MODRM_REG_MODE(bRm))
8630    {
8631        /* register target */
8632        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8633        IEM_MC_BEGIN(0, 0);
8634        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8635            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8636        } IEM_MC_ELSE() {
8637            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8638        } IEM_MC_ENDIF();
8639        IEM_MC_ADVANCE_RIP();
8640        IEM_MC_END();
8641    }
8642    else
8643    {
8644        /* memory target */
8645        IEM_MC_BEGIN(0, 1);
8646        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8647        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8648        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8650            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8651        } IEM_MC_ELSE() {
8652            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8653        } IEM_MC_ENDIF();
8654        IEM_MC_ADVANCE_RIP();
8655        IEM_MC_END();
8656    }
8657    return VINF_SUCCESS;
8658}
8659
8660
8661/** Opcode 0x0f 0x99 - setns Eb: set the r/m8 operand to 1 if SF is clear, else 0. */
8662FNIEMOP_DEF(iemOp_setns_Eb)
8663{
8664    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8665    IEMOP_HLP_MIN_386();
8666    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8667
8668    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8669     *        any way. AMD says it's "unused", whatever that means. We're
8670     *        ignoring for now. */
8671    if (IEM_IS_MODRM_REG_MODE(bRm))
8672    {
8673        /* register target */
8674        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675        IEM_MC_BEGIN(0, 0);
8676        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { /* inverted vs. sets: 0 when set, 1 when clear */
8677            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8678        } IEM_MC_ELSE() {
8679            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8680        } IEM_MC_ENDIF();
8681        IEM_MC_ADVANCE_RIP();
8682        IEM_MC_END();
8683    }
8684    else
8685    {
8686        /* memory target */
8687        IEM_MC_BEGIN(0, 1);
8688        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8689        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8690        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8692            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8693        } IEM_MC_ELSE() {
8694            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8695        } IEM_MC_ENDIF();
8696        IEM_MC_ADVANCE_RIP();
8697        IEM_MC_END();
8698    }
8699    return VINF_SUCCESS;
8700}
8701
8702
8703/** Opcode 0x0f 0x9a - setp/setpe Eb: set the r/m8 operand to 1 if PF is set, else 0. */
8704FNIEMOP_DEF(iemOp_setp_Eb)
8705{
8706    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8707    IEMOP_HLP_MIN_386();
8708    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8709
8710    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8711     *        any way. AMD says it's "unused", whatever that means. We're
8712     *        ignoring for now. */
8713    if (IEM_IS_MODRM_REG_MODE(bRm))
8714    {
8715        /* register target */
8716        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8717        IEM_MC_BEGIN(0, 0);
8718        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8719            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8720        } IEM_MC_ELSE() {
8721            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8722        } IEM_MC_ENDIF();
8723        IEM_MC_ADVANCE_RIP();
8724        IEM_MC_END();
8725    }
8726    else
8727    {
8728        /* memory target */
8729        IEM_MC_BEGIN(0, 1);
8730        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8731        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8732        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8733        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8734            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8735        } IEM_MC_ELSE() {
8736            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8737        } IEM_MC_ENDIF();
8738        IEM_MC_ADVANCE_RIP();
8739        IEM_MC_END();
8740    }
8741    return VINF_SUCCESS;
8742}
8743
8744
8745/** Opcode 0x0f 0x9b - setnp/setpo Eb: set the r/m8 operand to 1 if PF is clear, else 0. */
8746FNIEMOP_DEF(iemOp_setnp_Eb)
8747{
8748    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8749    IEMOP_HLP_MIN_386();
8750    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8751
8752    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8753     *        any way. AMD says it's "unused", whatever that means. We're
8754     *        ignoring for now. */
8755    if (IEM_IS_MODRM_REG_MODE(bRm))
8756    {
8757        /* register target */
8758        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8759        IEM_MC_BEGIN(0, 0);
8760        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { /* inverted vs. setp: 0 when set, 1 when clear */
8761            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8762        } IEM_MC_ELSE() {
8763            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8764        } IEM_MC_ENDIF();
8765        IEM_MC_ADVANCE_RIP();
8766        IEM_MC_END();
8767    }
8768    else
8769    {
8770        /* memory target */
8771        IEM_MC_BEGIN(0, 1);
8772        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8773        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8774        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8775        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8776            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8777        } IEM_MC_ELSE() {
8778            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8779        } IEM_MC_ENDIF();
8780        IEM_MC_ADVANCE_RIP();
8781        IEM_MC_END();
8782    }
8783    return VINF_SUCCESS;
8784}
8785
8786
8787/** Opcode 0x0f 0x9c - setl/setnge Eb: set the r/m8 operand to 1 if SF != OF (signed less), else 0. */
8788FNIEMOP_DEF(iemOp_setl_Eb)
8789{
8790    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8791    IEMOP_HLP_MIN_386();
8792    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8793
8794    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8795     *        any way. AMD says it's "unused", whatever that means. We're
8796     *        ignoring for now. */
8797    if (IEM_IS_MODRM_REG_MODE(bRm))
8798    {
8799        /* register target */
8800        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8801        IEM_MC_BEGIN(0, 0);
8802        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8803            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8804        } IEM_MC_ELSE() {
8805            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8806        } IEM_MC_ENDIF();
8807        IEM_MC_ADVANCE_RIP();
8808        IEM_MC_END();
8809    }
8810    else
8811    {
8812        /* memory target */
8813        IEM_MC_BEGIN(0, 1);
8814        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8815        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8816        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8817        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8818            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8819        } IEM_MC_ELSE() {
8820            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8821        } IEM_MC_ENDIF();
8822        IEM_MC_ADVANCE_RIP();
8823        IEM_MC_END();
8824    }
8825    return VINF_SUCCESS;
8826}
8827
8828
8829/** Opcode 0x0f 0x9d - setnl/setge Eb: set the r/m8 operand to 1 if SF == OF (signed greater-or-equal), else 0. */
8830FNIEMOP_DEF(iemOp_setnl_Eb)
8831{
8832    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8833    IEMOP_HLP_MIN_386();
8834    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8835
8836    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8837     *        any way. AMD says it's "unused", whatever that means. We're
8838     *        ignoring for now. */
8839    if (IEM_IS_MODRM_REG_MODE(bRm))
8840    {
8841        /* register target */
8842        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8843        IEM_MC_BEGIN(0, 0);
8844        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { /* inverted vs. setl: 0 when SF != OF */
8845            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8846        } IEM_MC_ELSE() {
8847            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8848        } IEM_MC_ENDIF();
8849        IEM_MC_ADVANCE_RIP();
8850        IEM_MC_END();
8851    }
8852    else
8853    {
8854        /* memory target */
8855        IEM_MC_BEGIN(0, 1);
8856        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8857        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8858        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8859        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8860            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8861        } IEM_MC_ELSE() {
8862            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8863        } IEM_MC_ENDIF();
8864        IEM_MC_ADVANCE_RIP();
8865        IEM_MC_END();
8866    }
8867    return VINF_SUCCESS;
8868}
8869
8870
8871/** Opcode 0x0f 0x9e - setle/setng Eb: set the r/m8 operand to 1 if ZF set or SF != OF (signed less-or-equal), else 0. */
8872FNIEMOP_DEF(iemOp_setle_Eb)
8873{
8874    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8875    IEMOP_HLP_MIN_386();
8876    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8877
8878    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8879     *        any way. AMD says it's "unused", whatever that means. We're
8880     *        ignoring for now. */
8881    if (IEM_IS_MODRM_REG_MODE(bRm))
8882    {
8883        /* register target */
8884        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8885        IEM_MC_BEGIN(0, 0);
8886        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8887            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8888        } IEM_MC_ELSE() {
8889            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8890        } IEM_MC_ENDIF();
8891        IEM_MC_ADVANCE_RIP();
8892        IEM_MC_END();
8893    }
8894    else
8895    {
8896        /* memory target */
8897        IEM_MC_BEGIN(0, 1);
8898        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8899        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8900        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8901        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8902            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8903        } IEM_MC_ELSE() {
8904            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8905        } IEM_MC_ENDIF();
8906        IEM_MC_ADVANCE_RIP();
8907        IEM_MC_END();
8908    }
8909    return VINF_SUCCESS;
8910}
8911
8912
8913/** Opcode 0x0f 0x9f - setnle/setg Eb: set the r/m8 operand to 1 if ZF clear and SF == OF (signed greater), else 0. */
8914FNIEMOP_DEF(iemOp_setnle_Eb)
8915{
8916    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8917    IEMOP_HLP_MIN_386();
8918    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8919
8920    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8921     *        any way. AMD says it's "unused", whatever that means. We're
8922     *        ignoring for now. */
8923    if (IEM_IS_MODRM_REG_MODE(bRm))
8924    {
8925        /* register target */
8926        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927        IEM_MC_BEGIN(0, 0);
8928        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { /* inverted vs. setle: 0 when 'le' holds */
8929            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8930        } IEM_MC_ELSE() {
8931            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8932        } IEM_MC_ENDIF();
8933        IEM_MC_ADVANCE_RIP();
8934        IEM_MC_END();
8935    }
8936    else
8937    {
8938        /* memory target */
8939        IEM_MC_BEGIN(0, 1);
8940        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8941        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8942        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8943        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8944            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8945        } IEM_MC_ELSE() {
8946            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8947        } IEM_MC_ENDIF();
8948        IEM_MC_ADVANCE_RIP();
8949        IEM_MC_END();
8950    }
8951    return VINF_SUCCESS;
8952}
8953
8954
8955/**
8956 * Common 'push segment-register' helper.
8957 *
8958 * Pushes the given segment register selector value, zero extended to the
8959 * effective operand size (the 32-bit case goes thru the special SREG push
8960 * worker).  Performs the lock-prefix check, so callers need not do it.
8961 */
8962FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8963{
8964    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8965    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
8966    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8967
8968    switch (pVCpu->iem.s.enmEffOpSize)
8969    {
8970        case IEMMODE_16BIT:
8971            IEM_MC_BEGIN(0, 1);
8972            IEM_MC_LOCAL(uint16_t, u16Value);
8973            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8974            IEM_MC_PUSH_U16(u16Value);
8975            IEM_MC_ADVANCE_RIP();
8976            IEM_MC_END();
8977            break;
8978
8979        case IEMMODE_32BIT:
8980            IEM_MC_BEGIN(0, 1);
8981            IEM_MC_LOCAL(uint32_t, u32Value);
8982            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
8983            IEM_MC_PUSH_U32_SREG(u32Value); /* special SREG push semantics */
8984            IEM_MC_ADVANCE_RIP();
8985            IEM_MC_END();
8986            break;
8987
8988        case IEMMODE_64BIT:
8989            IEM_MC_BEGIN(0, 1);
8990            IEM_MC_LOCAL(uint64_t, u64Value);
8991            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8992            IEM_MC_PUSH_U64(u64Value);
8993            IEM_MC_ADVANCE_RIP();
8994            IEM_MC_END();
8995            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* consistency with the other op-size switches in this file */
    }

    return VINF_SUCCESS;
}
8996
8997
8998/** Opcode 0x0f 0xa0 - push fs. */
8999FNIEMOP_DEF(iemOp_push_fs)
9000{
9001    IEMOP_MNEMONIC(push_fs, "push fs");
9002    IEMOP_HLP_MIN_386();
9003    /* Note! The lock-prefix check (IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX) is
9004             done first thing by the common worker, so repeating it here was
9005             redundant. */
9006    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
9007}
9006
9007
9008/** Opcode 0x0f 0xa1 - pop fs. */
9009FNIEMOP_DEF(iemOp_pop_fs)
9010{
9011    IEMOP_MNEMONIC(pop_fs, "pop fs");
9012    IEMOP_HLP_MIN_386();
9013    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment register loads are done by the C implementation. */
9014    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
9015}
9016
9017
9018/** Opcode 0x0f 0xa2 - cpuid (deferred to the C implementation). */
9019FNIEMOP_DEF(iemOp_cpuid)
9020{
9021    IEMOP_MNEMONIC(cpuid, "cpuid");
9022    IEMOP_HLP_MIN_486(); /* not all 486es. */
9023    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9024    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
9025}
9026
9027
9028/**
9029 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9030 * iemOp_bts_Ev_Gv.
9031 *
9032 * Register form: the bit offset is taken modulo the operand width.
9033 * Memory form: the (signed) bit offset in Gv selects a word relative to the
9034 * memory operand's effective address; the access is adjusted accordingly
9035 * before mapping the destination.  BT (no pfnLockedU16) only reads memory;
9036 * the other three map it read-write and honour the LOCK prefix.
9037 */
9038FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
9039{
9040    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9041    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9042
9043    if (IEM_IS_MODRM_REG_MODE(bRm))
9044    {
9045        /* register destination. */
9046        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9047        switch (pVCpu->iem.s.enmEffOpSize)
9048        {
9049            case IEMMODE_16BIT:
9050                IEM_MC_BEGIN(3, 0);
9051                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
9052                IEM_MC_ARG(uint16_t,        u16Src,                 1);
9053                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9054
9055                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9056                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); /* bit offset modulo operand width */
9057                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9058                IEM_MC_REF_EFLAGS(pEFlags);
9059                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9060
9061                IEM_MC_ADVANCE_RIP();
9062                IEM_MC_END();
9063                return VINF_SUCCESS;
9064
9065            case IEMMODE_32BIT:
9066                IEM_MC_BEGIN(3, 0);
9067                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
9068                IEM_MC_ARG(uint32_t,        u32Src,                 1);
9069                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9070
9071                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9072                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); /* bit offset modulo operand width */
9073                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9074                IEM_MC_REF_EFLAGS(pEFlags);
9075                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9076
9077                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9078                IEM_MC_ADVANCE_RIP();
9079                IEM_MC_END();
9080                return VINF_SUCCESS;
9081
9082            case IEMMODE_64BIT:
9083                IEM_MC_BEGIN(3, 0);
9084                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
9085                IEM_MC_ARG(uint64_t,        u64Src,                 1);
9086                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9087
9088                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9089                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); /* bit offset modulo operand width */
9090                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9091                IEM_MC_REF_EFLAGS(pEFlags);
9092                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9093
9094                IEM_MC_ADVANCE_RIP();
9095                IEM_MC_END();
9096                return VINF_SUCCESS;
9097
9098            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9099        }
9100    }
9101    else
9102    {
9103        /* memory destination. */
9104
9105        uint32_t fAccess;
9106        if (pImpl->pfnLockedU16)
9107            fAccess = IEM_ACCESS_DATA_RW;
9108        else /* BT */
9109            fAccess = IEM_ACCESS_DATA_R;
9110
9111        /** @todo test negative bit offsets! */
9112        switch (pVCpu->iem.s.enmEffOpSize)
9113        {
9114            case IEMMODE_16BIT:
9115                IEM_MC_BEGIN(3, 2);
9116                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
9117                IEM_MC_ARG(uint16_t,                u16Src,                 1);
9118                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
9119                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9120                IEM_MC_LOCAL(int16_t,               i16AddrAdj);
9121
9122                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9123                if (pImpl->pfnLockedU16)
9124                    IEMOP_HLP_DONE_DECODING();
9125                else
9126                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9127                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9128                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9129                IEM_MC_AND_ARG_U16(u16Src, 0x0f);       /* bit within the selected word */
9130                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);    /* signed word index (SAR keeps the sign for negative offsets) */
9131                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);    /* word index -> byte offset */
9132                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9133                IEM_MC_FETCH_EFLAGS(EFlags);
9134
9135                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9136                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9137                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9138                else
9139                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9140                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9141
9142                IEM_MC_COMMIT_EFLAGS(EFlags);
9143                IEM_MC_ADVANCE_RIP();
9144                IEM_MC_END();
9145                return VINF_SUCCESS;
9146
9147            case IEMMODE_32BIT:
9148                IEM_MC_BEGIN(3, 2);
9149                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
9150                IEM_MC_ARG(uint32_t,                u32Src,                 1);
9151                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
9152                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9153                IEM_MC_LOCAL(int32_t,               i32AddrAdj);
9154
9155                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9156                if (pImpl->pfnLockedU16)
9157                    IEMOP_HLP_DONE_DECODING();
9158                else
9159                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9160                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9161                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9162                IEM_MC_AND_ARG_U32(u32Src, 0x1f);       /* bit within the selected dword */
9163                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);    /* signed dword index */
9164                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);    /* dword index -> byte offset */
9165                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9166                IEM_MC_FETCH_EFLAGS(EFlags);
9167
9168                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9169                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9170                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9171                else
9172                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9173                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9174
9175                IEM_MC_COMMIT_EFLAGS(EFlags);
9176                IEM_MC_ADVANCE_RIP();
9177                IEM_MC_END();
9178                return VINF_SUCCESS;
9179
9180            case IEMMODE_64BIT:
9181                IEM_MC_BEGIN(3, 2);
9182                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
9183                IEM_MC_ARG(uint64_t,                u64Src,                 1);
9184                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
9185                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9186                IEM_MC_LOCAL(int64_t,               i64AddrAdj);
9187
9188                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9189                if (pImpl->pfnLockedU16)
9190                    IEMOP_HLP_DONE_DECODING();
9191                else
9192                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9193                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9194                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9195                IEM_MC_AND_ARG_U64(u64Src, 0x3f);       /* bit within the selected qword */
9196                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);    /* signed qword index */
9197                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);    /* qword index -> byte offset */
9198                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9199                IEM_MC_FETCH_EFLAGS(EFlags);
9200
9201                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9202                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9203                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9204                else
9205                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9206                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9207
9208                IEM_MC_COMMIT_EFLAGS(EFlags);
9209                IEM_MC_ADVANCE_RIP();
9210                IEM_MC_END();
9211                return VINF_SUCCESS;
9212
9213            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9214        }
9215    }
9216}
9211
9212
9213/** Opcode 0x0f 0xa3 - bt Ev,Gv (bit test; read-only variant of the common bit worker). */
9214FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9215{
9216    IEMOP_MNEMONIC(bt_Ev_Gv, "bt  Ev,Gv");
9217    IEMOP_HLP_MIN_386();
9218    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9219}
9220
9221
9222/**
9223 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes and emits a double-precision shift (SHLD/SHRD) with an immediate
 * byte shift count.  @a pImpl supplies the 16/32/64-bit assembly workers.
 * AF and OF are left undefined by these instructions (see the verification
 * macro below).
9224 */
9225FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9226{
9227    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9228    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9229
9230    if (IEM_IS_MODRM_REG_MODE(bRm))
9231    {
        /* Register destination: the immediate follows the ModR/M byte directly. */
9232        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9233        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9234
9235        switch (pVCpu->iem.s.enmEffOpSize)
9236        {
9237            case IEMMODE_16BIT:
9238                IEM_MC_BEGIN(4, 0);
9239                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9240                IEM_MC_ARG(uint16_t,        u16Src,  1);
9241                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
9242                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9243
9244                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9245                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9246                IEM_MC_REF_EFLAGS(pEFlags);
9247                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9248
9249                IEM_MC_ADVANCE_RIP();
9250                IEM_MC_END();
9251                return VINF_SUCCESS;
9252
9253            case IEMMODE_32BIT:
9254                IEM_MC_BEGIN(4, 0);
9255                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
9256                IEM_MC_ARG(uint32_t,        u32Src,  1);
9257                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
9258                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9259
9260                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9261                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9262                IEM_MC_REF_EFLAGS(pEFlags);
9263                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9264
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
9265                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9266                IEM_MC_ADVANCE_RIP();
9267                IEM_MC_END();
9268                return VINF_SUCCESS;
9269
9270            case IEMMODE_64BIT:
9271                IEM_MC_BEGIN(4, 0);
9272                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
9273                IEM_MC_ARG(uint64_t,        u64Src,  1);
9274                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
9275                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9276
9277                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9278                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9279                IEM_MC_REF_EFLAGS(pEFlags);
9280                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9281
9282                IEM_MC_ADVANCE_RIP();
9283                IEM_MC_END();
9284                return VINF_SUCCESS;
9285
9286            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9287        }
9288    }
9289    else
9290    {
        /* Memory destination: read-modify-write via mapped guest memory. */
9291        switch (pVCpu->iem.s.enmEffOpSize)
9292        {
9293            case IEMMODE_16BIT:
9294                IEM_MC_BEGIN(4, 2);
9295                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
9296                IEM_MC_ARG(uint16_t,                u16Src,     1);
9297                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9298                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9299                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9300
                /* cbImm=1: tell the effective-address calc an immediate byte
                   follows, so RIP-relative addressing is computed correctly. */
9301                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9302                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9303                IEM_MC_ASSIGN(cShiftArg, cShift);
9304                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9305                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9306                IEM_MC_FETCH_EFLAGS(EFlags);
9307                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9308                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9309
9310                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9311                IEM_MC_COMMIT_EFLAGS(EFlags);
9312                IEM_MC_ADVANCE_RIP();
9313                IEM_MC_END();
9314                return VINF_SUCCESS;
9315
9316            case IEMMODE_32BIT:
9317                IEM_MC_BEGIN(4, 2);
9318                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
9319                IEM_MC_ARG(uint32_t,                u32Src,     1);
9320                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9321                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9322                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9323
9324                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9325                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9326                IEM_MC_ASSIGN(cShiftArg, cShift);
9327                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9328                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9329                IEM_MC_FETCH_EFLAGS(EFlags);
9330                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9331                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9332
9333                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9334                IEM_MC_COMMIT_EFLAGS(EFlags);
9335                IEM_MC_ADVANCE_RIP();
9336                IEM_MC_END();
9337                return VINF_SUCCESS;
9338
9339            case IEMMODE_64BIT:
9340                IEM_MC_BEGIN(4, 2);
9341                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
9342                IEM_MC_ARG(uint64_t,                u64Src,     1);
9343                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9344                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9345                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9346
9347                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9348                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9349                IEM_MC_ASSIGN(cShiftArg, cShift);
9350                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9351                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9352                IEM_MC_FETCH_EFLAGS(EFlags);
9353                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9354                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9355
9356                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9357                IEM_MC_COMMIT_EFLAGS(EFlags);
9358                IEM_MC_ADVANCE_RIP();
9359                IEM_MC_END();
9360                return VINF_SUCCESS;
9361
9362            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9363        }
9364    }
9365}
9366
9367
9368/**
9369 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib, but the shift count comes from the CL
 * register instead of an immediate byte (hence cbImm=0 in the effective
 * address calculation).  AF and OF are undefined for these instructions.
9370 */
9371FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9372{
9373    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9374    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9375
9376    if (IEM_IS_MODRM_REG_MODE(bRm))
9377    {
9378        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9379
9380        switch (pVCpu->iem.s.enmEffOpSize)
9381        {
9382            case IEMMODE_16BIT:
9383                IEM_MC_BEGIN(4, 0);
9384                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9385                IEM_MC_ARG(uint16_t,        u16Src,  1);
9386                IEM_MC_ARG(uint8_t,         cShiftArg, 2);
9387                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9388
9389                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9390                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Shift count is the low byte of rCX (i.e. CL). */
9391                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9392                IEM_MC_REF_EFLAGS(pEFlags);
9393                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9394
9395                IEM_MC_ADVANCE_RIP();
9396                IEM_MC_END();
9397                return VINF_SUCCESS;
9398
9399            case IEMMODE_32BIT:
9400                IEM_MC_BEGIN(4, 0);
9401                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
9402                IEM_MC_ARG(uint32_t,        u32Src,  1);
9403                IEM_MC_ARG(uint8_t,         cShiftArg, 2);
9404                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9405
9406                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9407                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9408                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9409                IEM_MC_REF_EFLAGS(pEFlags);
9410                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9411
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
9412                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9413                IEM_MC_ADVANCE_RIP();
9414                IEM_MC_END();
9415                return VINF_SUCCESS;
9416
9417            case IEMMODE_64BIT:
9418                IEM_MC_BEGIN(4, 0);
9419                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
9420                IEM_MC_ARG(uint64_t,        u64Src,  1);
9421                IEM_MC_ARG(uint8_t,         cShiftArg, 2);
9422                IEM_MC_ARG(uint32_t *,      pEFlags, 3);
9423
9424                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9425                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9426                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9427                IEM_MC_REF_EFLAGS(pEFlags);
9428                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9429
9430                IEM_MC_ADVANCE_RIP();
9431                IEM_MC_END();
9432                return VINF_SUCCESS;
9433
9434            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9435        }
9436    }
9437    else
9438    {
        /* Memory destination: read-modify-write via mapped guest memory. */
9439        switch (pVCpu->iem.s.enmEffOpSize)
9440        {
9441            case IEMMODE_16BIT:
9442                IEM_MC_BEGIN(4, 2);
9443                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
9444                IEM_MC_ARG(uint16_t,                u16Src,     1);
9445                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9446                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9447                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9448
9449                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9450                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9451                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9452                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9453                IEM_MC_FETCH_EFLAGS(EFlags);
9454                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9455                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9456
9457                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9458                IEM_MC_COMMIT_EFLAGS(EFlags);
9459                IEM_MC_ADVANCE_RIP();
9460                IEM_MC_END();
9461                return VINF_SUCCESS;
9462
9463            case IEMMODE_32BIT:
9464                IEM_MC_BEGIN(4, 2);
9465                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
9466                IEM_MC_ARG(uint32_t,                u32Src,     1);
9467                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9468                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9469                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9470
9471                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9472                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9473                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9474                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9475                IEM_MC_FETCH_EFLAGS(EFlags);
9476                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9477                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9478
9479                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9480                IEM_MC_COMMIT_EFLAGS(EFlags);
9481                IEM_MC_ADVANCE_RIP();
9482                IEM_MC_END();
9483                return VINF_SUCCESS;
9484
9485            case IEMMODE_64BIT:
9486                IEM_MC_BEGIN(4, 2);
9487                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
9488                IEM_MC_ARG(uint64_t,                u64Src,     1);
9489                IEM_MC_ARG(uint8_t,                 cShiftArg,  2);
9490                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 3);
9491                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
9492
9493                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9494                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9495                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9496                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9497                IEM_MC_FETCH_EFLAGS(EFlags);
9498                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9499                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9500
9501                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9502                IEM_MC_COMMIT_EFLAGS(EFlags);
9503                IEM_MC_ADVANCE_RIP();
9504                IEM_MC_END();
9505                return VINF_SUCCESS;
9506
9507            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9508        }
9509    }
9510}
9511
9512
9513
9514/** Opcode 0x0f 0xa4. */
9515FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9516{
9517    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9518    IEMOP_HLP_MIN_386();
    /* Selects the host-CPU-flavour SHLD EFLAGS behaviour table and defers
       to the common immediate-count double-shift worker. */
9519    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9520}
9521
9522
9523/** Opcode 0x0f 0xa5. */
9524FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9525{
9526    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9527    IEMOP_HLP_MIN_386();
    /* Same as 0x0f 0xa4, but the shift count comes from CL. */
9528    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9529}
9530
9531
9532/** Opcode 0x0f 0xa8. */
9533FNIEMOP_DEF(iemOp_push_gs)
9534{
9535    IEMOP_MNEMONIC(push_gs, "push gs");
9536    IEMOP_HLP_MIN_386();
9537    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Shared segment-register push worker handles operand-size quirks. */
9538    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9539}
9540
9541
9541/** Opcode 0x0f 0xa9. */
9542FNIEMOP_DEF(iemOp_pop_gs)
9543{
9544    IEMOP_MNEMONIC(pop_gs, "pop gs");
9545    IEMOP_HLP_MIN_386();
9546    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loads can fault and reload hidden state, so defer to CIMPL. */
9547    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9548}
9550
9551
9552/** Opcode 0x0f 0xaa. */
9553FNIEMOP_DEF(iemOp_rsm)
9554{
9555    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9556    IEMOP_HLP_MIN_386(); /* 386SL and later. */
9557    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Resume-from-SMM restores a large amount of state; all done in CIMPL. */
9558    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9559}
9560
9561
9562
9563/** Opcode 0x0f 0xab. */
9564FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9565{
9566    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9567    IEMOP_HLP_MIN_386();
    /* Bit test-and-set; the common worker supplies the RMW/lock handling. */
9568    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9569}
9570
9571
9572/** Opcode 0x0f 0xac. */
9573FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9574{
9575    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9576    IEMOP_HLP_MIN_386();
    /* SHRD with immediate count; mirrors iemOp_shld_Ev_Gv_Ib. */
9577    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9578}
9579
9580
9581/** Opcode 0x0f 0xad. */
9582FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9583{
9584    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9585    IEMOP_HLP_MIN_386();
    /* SHRD with CL count; mirrors iemOp_shld_Ev_Gv_CL. */
9586    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9587}
9588
9589
9590/** Opcode 0x0f 0xae mem/0. */
9591FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9592{
9593    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    /* #UD if the CPU profile doesn't advertise FXSAVE/FXRSTOR. */
9594    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9595        return IEMOP_RAISE_INVALID_OPCODE();
9596
9597    IEM_MC_BEGIN(3, 1);
9598    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9599    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9600    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9601    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9602    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXSAVE only reads FPU/SSE state, so actualize for read. */
9603    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9604    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9605    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9606    IEM_MC_END();
9607    return VINF_SUCCESS;
9608}
9609
9610
9611/** Opcode 0x0f 0xae mem/1. */
9612FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9613{
9614    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    /* #UD if the CPU profile doesn't advertise FXSAVE/FXRSTOR. */
9615    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9616        return IEMOP_RAISE_INVALID_OPCODE();
9617
9618    IEM_MC_BEGIN(3, 1);
9619    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9620    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9621    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9622    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9623    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXRSTOR overwrites the FPU/SSE state, so actualize for change. */
9624    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9625    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9626    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9627    IEM_MC_END();
9628    return VINF_SUCCESS;
9629}
9630
9631
9632/**
9633 * @opmaps      grp15
9634 * @opcode      !11/2
9635 * @oppfx       none
9636 * @opcpuid     sse
9637 * @opgroup     og_sse_mxcsrsm
9638 * @opxcpttype  5
9639 * @optest      op1=0      -> mxcsr=0
9640 * @optest      op1=0x2083 -> mxcsr=0x2083
9641 * @optest      op1=0xfffffffe -> value.xcpt=0xd
9642 * @optest      op1=0x2083 cr0|=ts -> value.xcpt=0x7
9643 * @optest      op1=0x2083 cr0|=em -> value.xcpt=0x6
9644 * @optest      op1=0x2083 cr0|=mp -> mxcsr=0x2083
9645 * @optest      op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9646 * @optest      op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9647 * @optest      op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9648 * @optest      op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9649 * @optest      op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9650 */
9651FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9652{
9653    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD without SSE support. */
9654    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9655        return IEMOP_RAISE_INVALID_OPCODE();
9656
9657    IEM_MC_BEGIN(2, 0);
9658    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9659    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9660    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9661    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): LDMXCSR writes MXCSR yet only actualizes for read;
       presumably the CIMPL takes care of marking the state dirty — confirm. */
9662    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9663    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9664    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9665    IEM_MC_END();
9666    return VINF_SUCCESS;
9667}
9668
9669
9670/**
9671 * @opmaps      grp15
9672 * @opcode      !11/3
9673 * @oppfx       none
9674 * @opcpuid     sse
9675 * @opgroup     og_sse_mxcsrsm
9676 * @opxcpttype  5
9677 * @optest      mxcsr=0      -> op1=0
9678 * @optest      mxcsr=0x2083 -> op1=0x2083
9679 * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9680 * @optest      mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9681 * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
9682 * @optest      mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9683 * @optest      mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9684 * @optest      mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9685 * @optest      mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9686 * @optest      mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9687 */
9688FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9689{
9690    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD without SSE support. */
9691    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9692        return IEMOP_RAISE_INVALID_OPCODE();
9693
9694    IEM_MC_BEGIN(2, 0);
9695    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9696    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9697    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9698    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STMXCSR only reads MXCSR and writes it to memory. */
9699    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9700    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9701    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9702    IEM_MC_END();
9703    return VINF_SUCCESS;
9704}
9705
9706
9707/**
9708 * @opmaps      grp15
9709 * @opcode      !11/4
9710 * @oppfx       none
9711 * @opcpuid     xsave
9712 * @opgroup     og_system
9713 * @opxcpttype  none
9714 */
9715FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9716{
9717    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    /* #UD if the CPU profile doesn't advertise XSAVE/XRSTOR. */
9718    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9719        return IEMOP_RAISE_INVALID_OPCODE();
9720
9721    IEM_MC_BEGIN(3, 0);
9722    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9723    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9724    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9725    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9726    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XSAVE only reads the extended state, so actualize for read. */
9727    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9728    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9729    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9730    IEM_MC_END();
9731    return VINF_SUCCESS;
9732}
9733
9734
9735/**
9736 * @opmaps      grp15
9737 * @opcode      !11/5
9738 * @oppfx       none
9739 * @opcpuid     xsave
9740 * @opgroup     og_system
9741 * @opxcpttype  none
9742 */
9743FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9744{
9745    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    /* #UD if the CPU profile doesn't advertise XSAVE/XRSTOR. */
9746    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9747        return IEMOP_RAISE_INVALID_OPCODE();
9748
9749    IEM_MC_BEGIN(3, 0);
9750    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9751    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9752    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9753    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9754    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): XRSTOR modifies extended state but actualizes FOR_READ,
       unlike fxrstor which uses FOR_CHANGE — verify the CIMPL marks the
       state dirty itself, otherwise this looks inconsistent. */
9755    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9756    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9757    IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9758    IEM_MC_END();
9759    return VINF_SUCCESS;
9760}
9761
9762/** Opcode 0x0f 0xae mem/6. Not implemented yet; decodes to \#UD. */
9763FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9764
9765/**
9766 * @opmaps      grp15
9767 * @opcode      !11/7
9768 * @oppfx       none
9769 * @opcpuid     clfsh
9770 * @opgroup     og_cachectl
9771 * @optest      op1=1 ->
9772 */
9773FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9774{
9775    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSH support the whole /7 mem row must re-dispatch as invalid. */
9776    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9777        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9778
9779    IEM_MC_BEGIN(2, 0);
9780    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9781    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9782    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9783    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9784    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* CLFLUSH and CLFLUSHOPT share one CIMPL worker. */
9785    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9786    IEM_MC_END();
9787    return VINF_SUCCESS;
9788}
9789
9790/**
9791 * @opmaps      grp15
9792 * @opcode      !11/7
9793 * @oppfx       0x66
9794 * @opcpuid     clflushopt
9795 * @opgroup     og_cachectl
9796 * @optest      op1=1 ->
9797 */
9798FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9799{
9800    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSHOPT support the whole /7 mem row must re-dispatch as invalid. */
9801    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9802        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9803
9804    IEM_MC_BEGIN(2, 0);
9805    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
9806    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
9807    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9808    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9809    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* CLFLUSH and CLFLUSHOPT share one CIMPL worker. */
9810    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9811    IEM_MC_END();
9812    return VINF_SUCCESS;
9813}
9814
9815
9816/** Opcode 0x0f 0xae 11b/5. */
9817FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9818{
9819    RT_NOREF_PV(bRm);
9820    IEMOP_MNEMONIC(lfence, "lfence");
9821    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* LFENCE requires SSE2 in the guest CPU profile. */
9822    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9823        return IEMOP_RAISE_INVALID_OPCODE();
9824
9825    IEM_MC_BEGIN(0, 0);
    /* On non-ARM64 hosts without SSE2 fall back to a generic memory fence;
       ARM64 always uses the native lfence implementation. */
9826#ifndef RT_ARCH_ARM64
9827    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9828#endif
9829        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9830#ifndef RT_ARCH_ARM64
9831    else
9832        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9833#endif
9834    IEM_MC_ADVANCE_RIP();
9835    IEM_MC_END();
9836    return VINF_SUCCESS;
9837}
9838
9839
9840/** Opcode 0x0f 0xae 11b/6. */
9841FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9842{
9843    RT_NOREF_PV(bRm);
9844    IEMOP_MNEMONIC(mfence, "mfence");
9845    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 in the guest CPU profile. */
9846    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9847        return IEMOP_RAISE_INVALID_OPCODE();
9848
9849    IEM_MC_BEGIN(0, 0);
    /* Same host-capability fallback scheme as lfence. */
9850#ifndef RT_ARCH_ARM64
9851    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9852#endif
9853        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9854#ifndef RT_ARCH_ARM64
9855    else
9856        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9857#endif
9858    IEM_MC_ADVANCE_RIP();
9859    IEM_MC_END();
9860    return VINF_SUCCESS;
9861}
9862
9863
9864/** Opcode 0x0f 0xae 11b/7. */
9865FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9866{
9867    RT_NOREF_PV(bRm);
9868    IEMOP_MNEMONIC(sfence, "sfence");
9869    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): gating on SSE2 here; architecturally SFENCE arrived with
       SSE — presumably intentional for this CPU profile, confirm. */
9870    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9871        return IEMOP_RAISE_INVALID_OPCODE();
9872
9873    IEM_MC_BEGIN(0, 0);
    /* Same host-capability fallback scheme as lfence. */
9874#ifndef RT_ARCH_ARM64
9875    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9876#endif
9877        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9878#ifndef RT_ARCH_ARM64
9879    else
9880        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9881#endif
9882    IEM_MC_ADVANCE_RIP();
9883    IEM_MC_END();
9884    return VINF_SUCCESS;
9885}
9886
9887
9888/** Opcode 0xf3 0x0f 0xae 11b/0. */
9889FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9890{
9891    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9892    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The FSGSBASE xcpt check (CPUID/CR4/mode) is done inside the MC block. */
9893    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9894    {
9895        IEM_MC_BEGIN(1, 0);
9896        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9897        IEM_MC_ARG(uint64_t, u64Dst, 0);
9898        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9899        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9900        IEM_MC_ADVANCE_RIP();
9901        IEM_MC_END();
9902    }
9903    else
9904    {
        /* 32-bit operand size: store only the low 32 bits of the base. */
9905        IEM_MC_BEGIN(1, 0);
9906        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9907        IEM_MC_ARG(uint32_t, u32Dst, 0);
9908        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9909        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9910        IEM_MC_ADVANCE_RIP();
9911        IEM_MC_END();
9912    }
9913    return VINF_SUCCESS;
9914}
9915
9916
9917/** Opcode 0xf3 0x0f 0xae 11b/1. */
9918FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9919{
9920    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9921    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Identical to rdfsbase except it reads the GS base. */
9922    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9923    {
9924        IEM_MC_BEGIN(1, 0);
9925        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9926        IEM_MC_ARG(uint64_t, u64Dst, 0);
9927        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9928        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9929        IEM_MC_ADVANCE_RIP();
9930        IEM_MC_END();
9931    }
9932    else
9933    {
9934        IEM_MC_BEGIN(1, 0);
9935        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9936        IEM_MC_ARG(uint32_t, u32Dst, 0);
9937        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9938        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9939        IEM_MC_ADVANCE_RIP();
9940        IEM_MC_END();
9941    }
9942    return VINF_SUCCESS;
9943}
9944
9945
9946/** Opcode 0xf3 0x0f 0xae 11b/2. */
9947FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9948{
9949    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9950    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9951    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9952    {
9953        IEM_MC_BEGIN(1, 0);
9954        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9955        IEM_MC_ARG(uint64_t, u64Dst, 0);
9956        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A non-canonical base value raises #GP(0). */
9957        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9958        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9959        IEM_MC_ADVANCE_RIP();
9960        IEM_MC_END();
9961    }
9962    else
9963    {
        /* 32-bit source is zero-extended into the 64-bit base, so it is
           always canonical and needs no check. */
9964        IEM_MC_BEGIN(1, 0);
9965        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9966        IEM_MC_ARG(uint32_t, u32Dst, 0);
9967        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9968        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9969        IEM_MC_ADVANCE_RIP();
9970        IEM_MC_END();
9971    }
9972    return VINF_SUCCESS;
9973}
9974
9975
9976/** Opcode 0xf3 0x0f 0xae 11b/3. */
9977FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9978{
9979    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9980    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Identical to wrfsbase except it writes the GS base. */
9981    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9982    {
9983        IEM_MC_BEGIN(1, 0);
9984        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9985        IEM_MC_ARG(uint64_t, u64Dst, 0);
9986        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A non-canonical base value raises #GP(0). */
9987        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9988        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9989        IEM_MC_ADVANCE_RIP();
9990        IEM_MC_END();
9991    }
9992    else
9993    {
9994        IEM_MC_BEGIN(1, 0);
9995        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9996        IEM_MC_ARG(uint32_t, u32Dst, 0);
9997        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9998        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9999        IEM_MC_ADVANCE_RIP();
10000        IEM_MC_END();
10001    }
10002    return VINF_SUCCESS;
10003}
10004
10005
10006/**
10007 * Group 15 jump table for register variant.
 *
 * Indexed by ModR/M.reg * 4 + idxPrefix (none/066h/0f3h/0f2h); see
 * iemOp_Grp15 below for the dispatch.
10008 */
10009IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10010{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
10011    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
10012    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
10013    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
10014    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
10015    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10016    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10017    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10018    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10019};
10020AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10021
10022
10023/**
10024 * Group 15 jump table for memory variant.
 *
 * Same ModR/M.reg * 4 + idxPrefix layout as g_apfnGroup15RegReg.
10025 */
10026IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10027{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
10028    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10029    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10030    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10031    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10032    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10033    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10034    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10035    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
10036};
10037AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10038
10039
10040/** Opcode 0x0f 0xae. */
10041FNIEMOP_DEF(iemOp_Grp15)
10042{
10043    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10044    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on (ModR/M.reg, mandatory prefix) into the two tables above. */
10045    if (IEM_IS_MODRM_REG_MODE(bRm))
10046        /* register, register */
10047        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10048                                                  + pVCpu->iem.s.idxPrefix], bRm);
10049    /* memory, register */
10050    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10051                                              + pVCpu->iem.s.idxPrefix], bRm);
10052}
10053
10054
10055/** Opcode 0x0f 0xaf. */
10056FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10057{
10058    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10059    IEMOP_HLP_MIN_386();
    /* Two-operand IMUL leaves SF/ZF/AF/PF undefined; only CF/OF are defined. */
10060    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10061    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10062}
10063
10064
10065/** Opcode 0x0f 0xb0. */
10066FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10067{
10068    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10069    IEMOP_HLP_MIN_486();
10070    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10071
10072    if (IEM_IS_MODRM_REG_MODE(bRm))
10073    {
        /* Register destination: the assembly worker updates destination,
           AL and EFLAGS directly through the references. */
10074        IEMOP_HLP_DONE_DECODING();
10075        IEM_MC_BEGIN(4, 0);
10076        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
10077        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
10078        IEM_MC_ARG(uint8_t,         u8Src,                  2);
10079        IEM_MC_ARG(uint32_t *,      pEFlags,                3);
10080
10081        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10082        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10083        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10084        IEM_MC_REF_EFLAGS(pEFlags);
10085        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10086            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10087        else
10088            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10089
10090        IEM_MC_ADVANCE_RIP();
10091        IEM_MC_END();
10092    }
10093    else
10094    {
        /* Memory destination: AL is worked on via a local copy (u8Al) and
           explicitly written back after the memory commit. */
10095        IEM_MC_BEGIN(4, 3);
10096        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
10097        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
10098        IEM_MC_ARG(uint8_t,         u8Src,                  2);
10099        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
10100        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
10101        IEM_MC_LOCAL(uint8_t,       u8Al);
10102
10103        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10104        IEMOP_HLP_DONE_DECODING();
10105        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10106        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10107        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10108        IEM_MC_FETCH_EFLAGS(EFlags);
10109        IEM_MC_REF_LOCAL(pu8Al, u8Al);
10110        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10111            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10112        else
10113            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10114
10115        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10116        IEM_MC_COMMIT_EFLAGS(EFlags);
10117        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10118        IEM_MC_ADVANCE_RIP();
10119        IEM_MC_END();
10120    }
10121    return VINF_SUCCESS;
10122}
10123
10124/** Opcode 0x0f 0xb1. */
10125FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10126{
10127 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10128 IEMOP_HLP_MIN_486();
10129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10130
10131 if (IEM_IS_MODRM_REG_MODE(bRm))
10132 {
10133 IEMOP_HLP_DONE_DECODING();
10134 switch (pVCpu->iem.s.enmEffOpSize)
10135 {
10136 case IEMMODE_16BIT:
10137 IEM_MC_BEGIN(4, 0);
10138 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10139 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10140 IEM_MC_ARG(uint16_t, u16Src, 2);
10141 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10142
10143 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10144 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10145 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10146 IEM_MC_REF_EFLAGS(pEFlags);
10147 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10148 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10149 else
10150 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10151
10152 IEM_MC_ADVANCE_RIP();
10153 IEM_MC_END();
10154 return VINF_SUCCESS;
10155
10156 case IEMMODE_32BIT:
10157 IEM_MC_BEGIN(4, 0);
10158 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10159 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10160 IEM_MC_ARG(uint32_t, u32Src, 2);
10161 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10162
10163 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10164 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10165 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10166 IEM_MC_REF_EFLAGS(pEFlags);
10167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10169 else
10170 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10171
10172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10173 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10174 } IEM_MC_ELSE() {
10175 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10176 } IEM_MC_ENDIF();
10177
10178 IEM_MC_ADVANCE_RIP();
10179 IEM_MC_END();
10180 return VINF_SUCCESS;
10181
10182 case IEMMODE_64BIT:
10183 IEM_MC_BEGIN(4, 0);
10184 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10185 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10186#ifdef RT_ARCH_X86
10187 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10188#else
10189 IEM_MC_ARG(uint64_t, u64Src, 2);
10190#endif
10191 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10192
10193 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10194 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10195 IEM_MC_REF_EFLAGS(pEFlags);
10196#ifdef RT_ARCH_X86
10197 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10200 else
10201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10202#else
10203 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10204 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10205 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10206 else
10207 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10208#endif
10209
10210 IEM_MC_ADVANCE_RIP();
10211 IEM_MC_END();
10212 return VINF_SUCCESS;
10213
10214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10215 }
10216 }
10217 else
10218 {
10219 switch (pVCpu->iem.s.enmEffOpSize)
10220 {
10221 case IEMMODE_16BIT:
10222 IEM_MC_BEGIN(4, 3);
10223 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10224 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10225 IEM_MC_ARG(uint16_t, u16Src, 2);
10226 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10228 IEM_MC_LOCAL(uint16_t, u16Ax);
10229
10230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10231 IEMOP_HLP_DONE_DECODING();
10232 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10233 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10234 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10235 IEM_MC_FETCH_EFLAGS(EFlags);
10236 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10238 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10239 else
10240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10241
10242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10243 IEM_MC_COMMIT_EFLAGS(EFlags);
10244 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10245 IEM_MC_ADVANCE_RIP();
10246 IEM_MC_END();
10247 return VINF_SUCCESS;
10248
10249 case IEMMODE_32BIT:
10250 IEM_MC_BEGIN(4, 3);
10251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10252 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10253 IEM_MC_ARG(uint32_t, u32Src, 2);
10254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10256 IEM_MC_LOCAL(uint32_t, u32Eax);
10257
10258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10259 IEMOP_HLP_DONE_DECODING();
10260 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10261 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10262 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10263 IEM_MC_FETCH_EFLAGS(EFlags);
10264 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10267 else
10268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10269
10270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10271 IEM_MC_COMMIT_EFLAGS(EFlags);
10272 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10273 IEM_MC_ADVANCE_RIP();
10274 IEM_MC_END();
10275 return VINF_SUCCESS;
10276
10277 case IEMMODE_64BIT:
10278 IEM_MC_BEGIN(4, 3);
10279 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10280 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10281#ifdef RT_ARCH_X86
10282 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10283#else
10284 IEM_MC_ARG(uint64_t, u64Src, 2);
10285#endif
10286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10288 IEM_MC_LOCAL(uint64_t, u64Rax);
10289
10290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10291 IEMOP_HLP_DONE_DECODING();
10292 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10293 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10294 IEM_MC_FETCH_EFLAGS(EFlags);
10295 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10296#ifdef RT_ARCH_X86
10297 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10299 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10300 else
10301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10302#else
10303 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10304 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10305 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10306 else
10307 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10308#endif
10309
10310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10311 IEM_MC_COMMIT_EFLAGS(EFlags);
10312 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10313 IEM_MC_ADVANCE_RIP();
10314 IEM_MC_END();
10315 return VINF_SUCCESS;
10316
10317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10318 }
10319 }
10320}
10321
10322
/**
 * Common worker for the far-pointer load instructions (LSS/LFS/LGS, and the
 * LES/LDS forms that share this path): loads a selector:offset pair from
 * memory into the segment register @a iSegReg and the general purpose
 * register given by the ModR/M reg field.
 *
 * The offset width follows the effective operand size (16/32/64-bit); the
 * 16-bit selector is always fetched immediately after the offset.
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand
 *                      (asserted below, callers reject register forms).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 16-bit offset first, then the selector word at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset first, then the selector word at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* AMD CPUs reportedly only load a sign-extended 32-bit offset here; Intel loads 64 bits. */
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10384
10385
/** Opcode 0x0f 0xb2 - LSS Gv,Mp: load far pointer into SS:Gv.
 *  Register-form operands are invalid (\#UD); memory forms defer to the
 *  common far-pointer load worker. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
10396
10397
/** Opcode 0x0f 0xb3 - BTR Ev,Gv: bit test and reset.
 *  Thin wrapper that defers to the common Ev,Gv bit-operation worker with
 *  the BTR implementation table. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
10405
10406
/** Opcode 0x0f 0xb4 - LFS Gv,Mp: load far pointer into FS:Gv.
 *  Register-form operands are invalid (\#UD); memory forms defer to the
 *  common far-pointer load worker. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
10417
10418
/** Opcode 0x0f 0xb5 - LGS Gv,Mp: load far pointer into GS:Gv.
 *  Register-form operands are invalid (\#UD); memory forms defer to the
 *  common far-pointer load worker. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
10429
10430
/** Opcode 0x0f 0xb6 - MOVZX Gv,Eb: zero-extend a byte from a register or
 *  memory into a 16/32/64-bit general purpose register selected by the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10524
10525
/** Opcode 0x0f 0xb7 - MOVZX Gv,Ew: zero-extend a word from a register or
 *  memory into a 32/64-bit general purpose register.  A 16-bit effective
 *  operand size is treated the same as 32-bit here (see todo below). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
10594
10595
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF/Itanium); decoded
 *  as an undefined-opcode stub, i.e. raises \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10598
10599
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev.
 *  Raises \#UD (invalid-with-RM path) when the guest CPU profile lacks
 *  POPCNT; otherwise dispatches to the generic rv,rm binary-op worker with
 *  either the native assembly or the C fallback implementation table,
 *  depending on host support. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    /* Native assembly implementations, only available on x86/amd64 hosts. */
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    /* Portable C fallbacks, used when the host lacks POPCNT (or isn't x86). */
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
10616
10617
10618/**
10619 * @opcode 0xb9
10620 * @opinvalid intel-modrm
10621 * @optest ->
10622 */
10623FNIEMOP_DEF(iemOp_Grp10)
10624{
10625 /*
10626 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
10627 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10628 */
10629 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10630 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10631 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10632}
10633
10634
/** Opcode 0x0f 0xba - Group 8: BT/BTS/BTR/BTC Ev,Ib.
 *  The reg field of the ModR/M byte selects the operation; values 0-3 are
 *  invalid.  The imm8 bit offset is masked to the operand width, so unlike
 *  the Gv forms there is no negative/out-of-range bit addressing. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib");  break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit offset masked to word width */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked variant and only reads; BTS/BTR/BTC read-modify-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The 1 accounts for the trailing imm8 byte when computing the effective address. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10806
10807
/** Opcode 0x0f 0xbb - BTC Ev,Gv: bit test and complement.
 *  Thin wrapper that defers to the common Ev,Gv bit-operation worker with
 *  the BTC implementation table. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
10815
10816
10817/**
10818 * Common worker for BSF and BSR instructions.
10819 *
10820 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10821 * the destination register, which means that for 32-bit operations the high
10822 * bits must be left alone.
10823 *
10824 * @param pImpl Pointer to the instruction implementation (assembly).
10825 */
10826FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10827{
10828 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10829
10830 /*
10831 * If rm is denoting a register, no more instruction bytes.
10832 */
10833 if (IEM_IS_MODRM_REG_MODE(bRm))
10834 {
10835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10836 switch (pVCpu->iem.s.enmEffOpSize)
10837 {
10838 case IEMMODE_16BIT:
10839 IEM_MC_BEGIN(3, 0);
10840 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10841 IEM_MC_ARG(uint16_t, u16Src, 1);
10842 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10843
10844 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10845 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10846 IEM_MC_REF_EFLAGS(pEFlags);
10847 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10848
10849 IEM_MC_ADVANCE_RIP();
10850 IEM_MC_END();
10851 break;
10852
10853 case IEMMODE_32BIT:
10854 IEM_MC_BEGIN(3, 0);
10855 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10856 IEM_MC_ARG(uint32_t, u32Src, 1);
10857 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10858
10859 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10860 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10861 IEM_MC_REF_EFLAGS(pEFlags);
10862 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10863 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10864 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10865 IEM_MC_ENDIF();
10866 IEM_MC_ADVANCE_RIP();
10867 IEM_MC_END();
10868 break;
10869
10870 case IEMMODE_64BIT:
10871 IEM_MC_BEGIN(3, 0);
10872 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10873 IEM_MC_ARG(uint64_t, u64Src, 1);
10874 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10875
10876 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10877 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10878 IEM_MC_REF_EFLAGS(pEFlags);
10879 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10880
10881 IEM_MC_ADVANCE_RIP();
10882 IEM_MC_END();
10883 break;
10884 }
10885 }
10886 else
10887 {
10888 /*
10889 * We're accessing memory.
10890 */
10891 switch (pVCpu->iem.s.enmEffOpSize)
10892 {
10893 case IEMMODE_16BIT:
10894 IEM_MC_BEGIN(3, 1);
10895 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10896 IEM_MC_ARG(uint16_t, u16Src, 1);
10897 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10899
10900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10902 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10903 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10904 IEM_MC_REF_EFLAGS(pEFlags);
10905 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10906
10907 IEM_MC_ADVANCE_RIP();
10908 IEM_MC_END();
10909 break;
10910
10911 case IEMMODE_32BIT:
10912 IEM_MC_BEGIN(3, 1);
10913 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10914 IEM_MC_ARG(uint32_t, u32Src, 1);
10915 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10917
10918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10920 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10921 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10922 IEM_MC_REF_EFLAGS(pEFlags);
10923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10924
10925 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10926 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10927 IEM_MC_ENDIF();
10928 IEM_MC_ADVANCE_RIP();
10929 IEM_MC_END();
10930 break;
10931
10932 case IEMMODE_64BIT:
10933 IEM_MC_BEGIN(3, 1);
10934 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10935 IEM_MC_ARG(uint64_t, u64Src, 1);
10936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10938
10939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10941 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10942 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10943 IEM_MC_REF_EFLAGS(pEFlags);
10944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10945
10946 IEM_MC_ADVANCE_RIP();
10947 IEM_MC_END();
10948 break;
10949 }
10950 }
10951 return VINF_SUCCESS;
10952}
10953
10954
/** Opcode 0x0f 0xbc - BSF Gv,Ev: bit scan forward.
 *  Defers to the common bit-scan worker with the eflags-behavior-selected
 *  BSF implementation table (Intel/AMD differ on undefined flags). */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
10963
10964
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev.
 *  On CPUs without BMI1 the F3 prefix is ignored and this encoding executes
 *  as BSF, which is what the fallback below emulates.  The implementation
 *  table is selected on both the target CPU's eflags behavior and whether
 *  the host itself has BMI1 (native vs. generic code). */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* [host has BMI1][target eflags behavior] -> implementation table. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
10989
10990
/** Opcode 0x0f 0xbd - BSR Gv,Ev: bit scan reverse.
 *  Defers to the common bit-scan worker with the eflags-behavior-selected
 *  BSR implementation table (Intel/AMD differ on undefined flags). */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
10999
11000
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev.
 *  On CPUs without BMI1 (strictly, LZCNT/ABM) the F3 prefix is ignored and
 *  this encoding executes as BSR, which is what the fallback below emulates.
 *  The implementation table is selected on both the target CPU's eflags
 *  behavior and whether the host itself has BMI1 (native vs. generic). */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* [host has BMI1][target eflags behavior] -> implementation table. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
11025
11026
11027
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb: sign-extend a byte from a register or
 *  memory into a 16/32/64-bit general purpose register selected by the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11121
11122
/** Opcode 0x0f 0xbf.
 *
 * movsx Gv,Ew - sign-extend a word register or memory operand into a 32 or
 * 64-bit general purpose register.  With a 16-bit effective operand size the
 * 32-bit path is taken (see the todo below).  Requires a 386 or later.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Only two result widths: everything below 64-bit is treated as 32-bit. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
11191
11192
/** Opcode 0x0f 0xc0.
 *
 * xadd Eb,Gb - exchange-and-add on byte operands: the destination receives
 * the sum, the source register receives the old destination value, and the
 * arithmetic flags are updated by the worker.  The memory form honours the
 * LOCK prefix via the _locked worker variant.  Requires a 486 or later.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Both operands are referenced in place; the worker swaps and adds. */
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        /* The register operand is worked on via a local copy so the guest
           register is only updated after the memory commit succeeds. */
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* Note: redundant with the return below, kept as-is. */
    }
    return VINF_SUCCESS;
}
11251
11252
/** Opcode 0x0f 0xc1.
 *
 * xadd Ev,Gv - exchange-and-add for 16/32/64-bit operands, selected by the
 * effective operand size.  The destination receives the sum and the source
 * register the old destination value; flags come from the worker.  The
 * memory form honours the LOCK prefix via the _locked worker variants.
 * Requires a 486 or later.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes through references don't auto-clear the high
                   halves in 64-bit mode, so do it explicitly for both regs. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                /* Work on a local copy; the guest register is only written
                   after the memory operand commits successfully. */
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* STORE_GREG_U32 clears the high dword of the register here. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11405
11406
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib
 *
 * Packed single-precision compare: the worker compares all four singles of
 * the destination against XMM/m128 using the predicate in the immediate and
 * writes per-element all-ones/all-zeroes masks.  The result is only stored
 * when no unmasked SIMD FP exception is pending in MXCSR.  SSE instruction.
 */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 2);
        /* Src bundles both inputs (uSrc1 = destination reg, uSrc2 = W operand). */
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        /* Suppress the register update if an unmasked exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11477
11478
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib
 *
 * Packed double-precision compare: compares both doubles of the destination
 * against XMM/m128 using the predicate in the immediate and writes per
 * element all-ones/all-zeroes 64-bit masks.  The result is only stored when
 * no unmasked SIMD FP exception is pending in MXCSR.  SSE2 instruction.
 */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 2);
        /* Src bundles both inputs (uSrc1 = destination reg, uSrc2 = W operand). */
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        /* Suppress the register update if an unmasked exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11549
11550
11551/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11552FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11553{
11554 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11555
11556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11557 if (IEM_IS_MODRM_REG_MODE(bRm))
11558 {
11559 /*
11560 * Register, register.
11561 */
11562 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11564 IEM_MC_BEGIN(4, 2);
11565 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11566 IEM_MC_LOCAL(X86XMMREG, Dst);
11567 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11568 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11569 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11570 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11571 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11572 IEM_MC_PREPARE_SSE_USAGE();
11573 IEM_MC_REF_MXCSR(pfMxcsr);
11574 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11575 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11576 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11577 IEM_MC_IF_MXCSR_XCPT_PENDING()
11578 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11579 IEM_MC_ELSE()
11580 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11581 IEM_MC_ENDIF();
11582
11583 IEM_MC_ADVANCE_RIP();
11584 IEM_MC_END();
11585 }
11586 else
11587 {
11588 /*
11589 * Register, memory.
11590 */
11591 IEM_MC_BEGIN(4, 3);
11592 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11593 IEM_MC_LOCAL(X86XMMREG, Dst);
11594 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11595 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11596 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11598
11599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11600 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11601 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11604 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11605
11606 IEM_MC_PREPARE_SSE_USAGE();
11607 IEM_MC_REF_MXCSR(pfMxcsr);
11608 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11609 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11610 IEM_MC_IF_MXCSR_XCPT_PENDING()
11611 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11612 IEM_MC_ELSE()
11613 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11614 IEM_MC_ENDIF();
11615
11616 IEM_MC_ADVANCE_RIP();
11617 IEM_MC_END();
11618 }
11619 return VINF_SUCCESS;
11620}
11621
11622
11623/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11624FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11625{
11626 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11627
11628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11629 if (IEM_IS_MODRM_REG_MODE(bRm))
11630 {
11631 /*
11632 * Register, register.
11633 */
11634 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11636 IEM_MC_BEGIN(4, 2);
11637 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11638 IEM_MC_LOCAL(X86XMMREG, Dst);
11639 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11640 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11641 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11642 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11643 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11644 IEM_MC_PREPARE_SSE_USAGE();
11645 IEM_MC_REF_MXCSR(pfMxcsr);
11646 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11647 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11648 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11649 IEM_MC_IF_MXCSR_XCPT_PENDING()
11650 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11651 IEM_MC_ELSE()
11652 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11653 IEM_MC_ENDIF();
11654
11655 IEM_MC_ADVANCE_RIP();
11656 IEM_MC_END();
11657 }
11658 else
11659 {
11660 /*
11661 * Register, memory.
11662 */
11663 IEM_MC_BEGIN(4, 3);
11664 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11665 IEM_MC_LOCAL(X86XMMREG, Dst);
11666 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11667 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11668 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11670
11671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11672 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11673 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11676 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11677
11678 IEM_MC_PREPARE_SSE_USAGE();
11679 IEM_MC_REF_MXCSR(pfMxcsr);
11680 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11681 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11682 IEM_MC_IF_MXCSR_XCPT_PENDING()
11683 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11684 IEM_MC_ELSE()
11685 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11686 IEM_MC_ENDIF();
11687
11688 IEM_MC_ADVANCE_RIP();
11689 IEM_MC_END();
11690 }
11691 return VINF_SUCCESS;
11692}
11693
11694
/** Opcode 0x0f 0xc3.
 *
 * movnti My,Gy - non-temporal store of a 32/64-bit general register to
 * memory.  Only the register-to-memory form is valid; the register form and
 * the 16-bit operand size raise \#UD.  Requires SSE2 (checked after address
 * decoding).  The non-temporal hint itself is not modelled - the store is a
 * plain one.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* CPUID gate: #UD when the guest doesn't have SSE2. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
11748
11749
11750/* Opcode 0x66 0x0f 0xc3 - invalid */
11751/* Opcode 0xf3 0x0f 0xc3 - invalid */
11752/* Opcode 0xf2 0x0f 0xc3 - invalid */
11753
11754
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib
 *
 * MMX pinsrw: insert a 16-bit value from a general register or memory into
 * one of the four words of an MMX register, selected by the immediate.
 * Requires SSE or the AMD MMX extensions; puts the FPU into MMX mode.
 */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low word of the GPR is used as insertion source. */
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11808
11809
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib
 *
 * SSE2 pinsrw: insert a 16-bit value from a general register or memory into
 * one of the eight words of an XMM register, selected by the immediate.
 */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low word of the GPR is used as insertion source. */
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11859
11860
11861/* Opcode 0xf3 0x0f 0xc4 - invalid */
11862/* Opcode 0xf2 0x0f 0xc4 - invalid */
11863
11864
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib
 *
 * MMX pextrw: extract one of the four words of an MMX register, selected by
 * the immediate, and zero-extend it into a 32-bit general register (via the
 * U32 store).  Register form only - a memory operand raises \#UD.  Requires
 * SSE or the AMD MMX extensions; puts the FPU into MMX mode.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pinsrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
        /* Zero-extending 16-bit result into the 32-bit destination register. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* No memory operand. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
11896
11897
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib
 *
 * SSE2 pextrw: extract one of the eight words of an XMM register, selected
 * by the immediate, and zero-extend it into a 32-bit general register (via
 * the U32 store).  Register form only - a memory operand raises \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
        /* Zero-extending 16-bit result into the 32-bit destination register. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* No memory operand. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
11928
11929
11930/* Opcode 0xf3 0x0f 0xc5 - invalid */
11931/* Opcode 0xf2 0x0f 0xc5 - invalid */
11932
11933
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib
 *
 * Shuffle packed singles: selects two dwords from the destination and two
 * from XMM/m128 according to the immediate control byte.  SSE instruction;
 * the memory operand must be 16-byte aligned.
 */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch (#GP on misaligned operand). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11985
11986
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 *
 * Shuffle packed doubles: selects one qword from the destination and one
 * from XMM/m128 according to the immediate control byte.  SSE2 instruction;
 * the memory operand must be 16-byte aligned.
 */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch (#GP on misaligned operand). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12038
12039
12040/* Opcode 0xf3 0x0f 0xc6 - invalid */
12041/* Opcode 0xf2 0x0f 0xc6 - invalid */
12042
12043
/** Opcode 0x0f 0xc7 !11/1.
 *
 * CMPXCHG8B m64: compares EDX:EAX with the 64-bit memory operand; on match
 * stores ECX:EBX to memory and sets ZF, otherwise loads the memory value
 * into EDX:EAX and clears ZF.  The LOCK prefix selects the locked worker.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    /* Map the destination read-write; committed (and unmapped) after the worker runs. */
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Only the low 32 bits of each GREG are fetched: the comparand is EDX:EAX
       and the replacement value ECX:EBX, regardless of REX.W. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) write the memory value back into EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12088
12089
/** Opcode REX.W 0x0f 0xc7 !11/1.
 *
 * CMPXCHG16B m128: gated on the guest CPUID CMPXCHG16B feature (#UD
 * otherwise).  The 16-byte operand must be 16-byte aligned (#GP(0)).
 * On hosts with a native 16-byte CAS the assembly worker is used; otherwise
 * a non-atomic fallback runs for uni-processor guests, and an EMT
 * rendezvous CIMPL for SMP guests.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* Architectural requirement: the m128 operand must be 16-byte aligned. */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Comparand RDX:RAX and replacement RCX:RBX, full 64-bit halves. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        /* On AMD64 the native worker additionally requires host CMPXCHG16B support. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear), the memory value is loaded into RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
12169
12170FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12171{
12172 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12173 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12174 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12175}
12176
/** Opcode 0x0f 0xc7 11/6 - rdrand Rv (stub, not implemented yet). */
FNIEMOP_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
12179
/** Opcode 0x0f 0xc7 !11/6.
 *
 * VMPTRLD m64: loads the current-VMCS pointer.  Decoding only computes the
 * effective address and defers all the work to the iemCImpl_vmptrld C
 * implementation.  Compiled as a \#UD stub when nested VMX is disabled.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* 66h/F3h/F2h prefixes select other table entries, so reject them here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12200
/** Opcode 0x66 0x0f 0xc7 !11/6.
 *
 * VMCLEAR m64: clears the referenced VMCS; the work is deferred to
 * iemCImpl_vmclear.  Compiled as a \#UD stub when nested VMX is disabled.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12221
/** Opcode 0xf3 0x0f 0xc7 !11/6.
 *
 * VMXON m64: enters VMX operation; deferred to iemCImpl_vmxon.  Note that
 * unlike vmptrld/vmptrst there is no IEMOP_HLP_IN_VMX_OPERATION check here,
 * since vmxon is legal outside VMX operation.  \#UD stub when nested VMX is
 * disabled.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12241
/** Opcode [0xf3] 0x0f 0xc7 !11/7.
 *
 * VMPTRST m64: stores the current-VMCS pointer; deferred to
 * iemCImpl_vmptrst.  \#UD stub when nested VMX is disabled.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* 66h/F3h/F2h prefixes select other table entries, so reject them here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12262
/** Opcode 0x0f 0xc7 11/7 - rdseed Rv (stub, not implemented yet). */
FNIEMOP_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
12265
12266
12267/**
12268 * Group 9 jump table for register variant.
12269 */
12270IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12271{ /* pfx: none, 066h, 0f3h, 0f2h */
12272 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12273 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12274 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12275 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12276 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12277 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12278 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12279 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12280};
12281AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12282
12283
12284/**
12285 * Group 9 jump table for memory variant.
12286 */
12287IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12288{ /* pfx: none, 066h, 0f3h, 0f2h */
12289 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12290 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12291 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12292 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12293 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12294 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12295 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12296 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12297};
12298AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12299
12300
12301/** Opcode 0x0f 0xc7. */
12302FNIEMOP_DEF(iemOp_Grp9)
12303{
12304 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12305 if (IEM_IS_MODRM_REG_MODE(bRm))
12306 /* register, register */
12307 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12308 + pVCpu->iem.s.idxPrefix], bRm);
12309 /* memory, register */
12310 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12311 + pVCpu->iem.s.idxPrefix], bRm);
12312}
12313
12314
12315/**
12316 * Common 'bswap register' helper.
12317 */
12318FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12319{
12320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12321 switch (pVCpu->iem.s.enmEffOpSize)
12322 {
12323 case IEMMODE_16BIT:
12324 IEM_MC_BEGIN(1, 0);
12325 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12326 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12327 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12328 IEM_MC_ADVANCE_RIP();
12329 IEM_MC_END();
12330 return VINF_SUCCESS;
12331
12332 case IEMMODE_32BIT:
12333 IEM_MC_BEGIN(1, 0);
12334 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12335 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12337 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12338 IEM_MC_ADVANCE_RIP();
12339 IEM_MC_END();
12340 return VINF_SUCCESS;
12341
12342 case IEMMODE_64BIT:
12343 IEM_MC_BEGIN(1, 0);
12344 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12345 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12346 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12347 IEM_MC_ADVANCE_RIP();
12348 IEM_MC_END();
12349 return VINF_SUCCESS;
12350
12351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12352 }
12353}
12354
12355
/** Opcode 0x0f 0xc8 - bswap rAX/r8; register selected by opcode + REX.B. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12375
12376
12377/** Opcode 0x0f 0xca. */
12378FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12379{
12380 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
12381 IEMOP_HLP_MIN_486();
12382 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12383}
12384
12385
12386/** Opcode 0x0f 0xcb. */
12387FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12388{
12389 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
12390 IEMOP_HLP_MIN_486();
12391 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12392}
12393
12394
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12429
12430
/* Opcode 0x0f 0xd0 - invalid */


/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd (SSE3); decode/raise handled by
 *  the common SSE3 worker, arithmetic by iemAImpl_addsubpd_u128. */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}


/* Opcode 0xf3 0x0f 0xd0 - invalid */


/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps (SSE3). */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12451
12452
12453
12454/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12455FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12456{
12457 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12458 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12459}
12460
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx (SSE2 logical word right shift). */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12467
12468/* Opcode 0xf3 0x0f 0xd1 - invalid */
12469/* Opcode 0xf2 0x0f 0xd1 - invalid */
12470
/** Opcode 0x0f 0xd2 - psrld Pq, Qq (MMX logical dword right shift). */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}


/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx (SSE2 logical dword right shift). */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}


/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq (MMX logical qword right shift).
 *  NOTE(review): the DISOPTYPE/hint flags differ from the psrlw/psrld
 *  siblings (no DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES instead of 0) —
 *  looks like drift; confirm the intended convention before changing. */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}


/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx (SSE2 logical qword right shift). */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12504
12505
12506/* Opcode 0xf3 0x0f 0xd3 - invalid */
12507/* Opcode 0xf2 0x0f 0xd3 - invalid */
12508
12509
/** Opcode 0x0f 0xd4 - paddq Pq, Qq
 *  (MMX qword add; requires SSE2 in CPUID, hence the _Ex worker with the
 *  fSse2 gate). */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx (SSE2 qword add). */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}


/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq (MMX low word multiply). */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}

/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx (SSE2 low word multiply). */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12542
12543
12544/* Opcode 0xf3 0x0f 0xd5 - invalid */
12545/* Opcode 0xf2 0x0f 0xd5 - invalid */
12546
12547/* Opcode 0x0f 0xd6 - invalid */
12548
12549/**
12550 * @opcode 0xd6
12551 * @oppfx 0x66
12552 * @opcpuid sse2
12553 * @opgroup og_sse2_pcksclr_datamove
12554 * @opxcpttype none
12555 * @optest op1=-1 op2=2 -> op1=2
12556 * @optest op1=0 op2=-42 -> op1=-42
12557 */
12558FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12559{
12560 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12562 if (IEM_IS_MODRM_REG_MODE(bRm))
12563 {
12564 /*
12565 * Register, register.
12566 */
12567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12568 IEM_MC_BEGIN(0, 2);
12569 IEM_MC_LOCAL(uint64_t, uSrc);
12570
12571 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12572 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12573
12574 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12575 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12576
12577 IEM_MC_ADVANCE_RIP();
12578 IEM_MC_END();
12579 }
12580 else
12581 {
12582 /*
12583 * Memory, register.
12584 */
12585 IEM_MC_BEGIN(0, 2);
12586 IEM_MC_LOCAL(uint64_t, uSrc);
12587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12588
12589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12591 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12592 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12593
12594 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12595 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12596
12597 IEM_MC_ADVANCE_RIP();
12598 IEM_MC_END();
12599 }
12600 return VINF_SUCCESS;
12601}
12602
12603
12604/**
12605 * @opcode 0xd6
12606 * @opcodesub 11 mr/reg
12607 * @oppfx f3
12608 * @opcpuid sse2
12609 * @opgroup og_sse2_simdint_datamove
12610 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12611 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12612 */
12613FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12614{
12615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12616 if (IEM_IS_MODRM_REG_MODE(bRm))
12617 {
12618 /*
12619 * Register, register.
12620 */
12621 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12623 IEM_MC_BEGIN(0, 1);
12624 IEM_MC_LOCAL(uint64_t, uSrc);
12625
12626 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12627 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12628 IEM_MC_FPU_TO_MMX_MODE();
12629
12630 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12631 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12632
12633 IEM_MC_ADVANCE_RIP();
12634 IEM_MC_END();
12635 return VINF_SUCCESS;
12636 }
12637
12638 /**
12639 * @opdone
12640 * @opmnemonic udf30fd6mem
12641 * @opcode 0xd6
12642 * @opcodesub !11 mr/reg
12643 * @oppfx f3
12644 * @opunused intel-modrm
12645 * @opcpuid sse
12646 * @optest ->
12647 */
12648 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12649}
12650
12651
12652/**
12653 * @opcode 0xd6
12654 * @opcodesub 11 mr/reg
12655 * @oppfx f2
12656 * @opcpuid sse2
12657 * @opgroup og_sse2_simdint_datamove
12658 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12659 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12660 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12661 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12662 * @optest op1=-42 op2=0xfedcba9876543210
12663 * -> op1=0xfedcba9876543210 ftw=0xff
12664 */
12665FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12666{
12667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12668 if (IEM_IS_MODRM_REG_MODE(bRm))
12669 {
12670 /*
12671 * Register, register.
12672 */
12673 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12675 IEM_MC_BEGIN(0, 1);
12676 IEM_MC_LOCAL(uint64_t, uSrc);
12677
12678 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12679 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12680 IEM_MC_FPU_TO_MMX_MODE();
12681
12682 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12683 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12684
12685 IEM_MC_ADVANCE_RIP();
12686 IEM_MC_END();
12687 return VINF_SUCCESS;
12688 }
12689
12690 /**
12691 * @opdone
12692 * @opmnemonic udf20fd6mem
12693 * @opcode 0xd6
12694 * @opcodesub !11 mr/reg
12695 * @oppfx f2
12696 * @opunused intel-modrm
12697 * @opcpuid sse
12698 * @optest ->
12699 */
12700 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12701}
12702
12703
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq
 *
 * Extracts the byte sign bits of an MMX register into a GPR.  Register form
 * only (memory form raises \#UD).  Requires SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
12731
12732
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux
 *
 * SSE2 variant: extracts the 16 byte sign bits of an XMM register into a
 * GPR.  Register form only (memory form raises \#UD).
 */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
12757
12758
12759/* Opcode 0xf3 0x0f 0xd7 - invalid */
12760/* Opcode 0xf2 0x0f 0xd7 - invalid */
12761
12762
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq (MMX unsigned saturating byte subtract). */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx (SSE2 unsigned saturating byte subtract). */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq (MMX unsigned saturating word subtract). */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx (SSE2 unsigned saturating word subtract). */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq
 *  (unsigned byte minimum; MMX form requires SSE or MMXEXT, hence the
 *  MmxSse common worker). */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx (SSE2 unsigned byte minimum). */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
12815
12816/* Opcode 0xf3 0x0f 0xda - invalid */
12817/* Opcode 0xf2 0x0f 0xda - invalid */
12818
/** Opcode 0x0f 0xdb - pand Pq, Qq (MMX bitwise AND). */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx (SSE2 bitwise AND). */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq (MMX unsigned saturating byte add). */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx (SSE2 unsigned saturating byte add). */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq (MMX unsigned saturating word add). */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx (SSE2 unsigned saturating word add). */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq
 *  (unsigned byte maximum; MMX form requires SSE or MMXEXT, hence the
 *  MmxSse common worker). */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx (SSE2 unsigned byte maximum). */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE "Opt" worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}

/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq (arithmetic right shift, shift count in Qq) */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}

/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE "Opt" worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE "Opt" worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}

/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq (signed high-word multiply) */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */


/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd (truncating double -> dword conversion) */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd (rounds according to MXCSR.RC) */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2  -> op1=2   ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    /* Non-temporal store of an MMX register to memory; only the memory form is valid. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* MMX instruction: switch the FPU into MMX mode (tags/TOS) before touching MM regs. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2  -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    /* Non-temporal 128-bit store of an XMM register; memory operand must be 16-byte aligned
       (enforced by the ALIGN_SSE store below), and only the memory form is valid. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq (subtract with signed saturation) */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}

/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}

/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq (add with signed saturation) */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}

/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx (SSE3 unaligned 128-bit load) */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,  u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,     GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Plain (unaligned) fetch - lddqu never raises #GP on misalignment. */
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xf1 - psllw Pq, Qq (left shift, shift count in Qq) */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq (unsigned 32x32 -> 64-bit multiply; SSE2 addition to MMX) */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq (multiply-add words to dwords) */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq (sum of absolute differences) */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE "Opt" worker: this MMX encoding was introduced with SSE. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (not yet implemented, see FNIEMOP_STUB) */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (not yet implemented, see FNIEMOP_STUB) */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}

/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* psubq was added with SSE2, hence the _Ex worker gating on the SSE2 CPUID feature. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */

/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    /* On Intel CPUs, UD0 consumes a ModR/M byte (and its effective address bytes),
       so decode it before raising \#UD to keep the reported instruction length right. */
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Decode the effective address only for its side effect on instruction length. */
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
13613
13614
13615
13616/**
13617 * Two byte opcode map, first byte 0x0f.
13618 *
13619 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13620 * check if it needs updating as well when making changes.
13621 */
13622IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13623{
13624 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13625 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13626 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13627 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13628 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13629 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13630 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13631 /* 0x06 */ IEMOP_X4(iemOp_clts),
13632 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13633 /* 0x08 */ IEMOP_X4(iemOp_invd),
13634 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13635 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13636 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13637 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13638 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13639 /* 0x0e */ IEMOP_X4(iemOp_femms),
13640 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13641
13642 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13643 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13644 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13645 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13646 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13647 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13648 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13649 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13650 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13651 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13652 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13653 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13654 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13655 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13656 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13657 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13658
13659 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13660 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13661 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13662 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13663 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13664 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13665 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13666 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13667 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13668 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13669 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13670 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13671 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13672 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13673 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13674 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13675
13676 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13677 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13678 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13679 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13680 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13681 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13682 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13683 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13684 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13685 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13686 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13687 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13688 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13689 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13690 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13691 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13692
13693 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13694 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13695 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13696 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13697 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13698 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13699 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13700 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13701 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13702 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13703 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13704 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13705 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13706 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13707 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13708 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13709
13710 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13711 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13712 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13713 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13714 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13715 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13716 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13717 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13718 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13719 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13720 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13721 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13722 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13723 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13724 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13725 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13726
13727 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13728 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13729 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13730 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13731 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13732 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13733 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13734 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13735 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13736 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13737 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13738 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13739 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13740 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13741 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13742 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13743
13744 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13745 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13746 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13747 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13748 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13749 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13750 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13751 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13752
13753 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13754 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13755 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13756 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13757 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13758 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13759 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13760 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13761
13762 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13763 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13764 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13765 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13766 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13767 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13768 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13769 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13770 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13771 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13772 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13773 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13774 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13775 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13776 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13777 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13778
13779 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13780 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13781 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13782 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13783 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13784 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13785 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13786 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13787 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13788 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13789 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13790 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13791 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13792 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13793 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13794 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13795
13796 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13797 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13798 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13799 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13800 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13801 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13802 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13803 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13804 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13805 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13806 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13807 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13808 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13809 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13810 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13811 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13812
13813 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13814 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13815 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13816 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13817 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13818 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13819 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13820 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13821 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13822 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13823 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13824 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13825 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13826 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13827 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13828 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13829
13830 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13831 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13832 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13833 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13834 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13835 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13836 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13837 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13838 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13839 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13840 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13841 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13842 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13843 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13844 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13845 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13846
13847 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13848 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13849 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13850 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13851 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13852 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13853 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13854 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13855 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13856 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13857 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13858 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13859 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13860 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13861 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13862 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13863
13864 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13865 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13866 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13867 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13868 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13869 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13870 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13871 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13872 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13873 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13874 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13875 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13876 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13877 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13878 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13879 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13880
13881 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13882 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13883 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13884 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13885 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13886 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13887 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13888 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13889 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13890 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13891 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13892 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13893 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13894 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13895 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13896 /* 0xff */ IEMOP_X4(iemOp_ud0),
13897};
/* Sanity: 256 two-byte opcodes x 4 prefix variants (none, 0x66, 0xf3, 0xf2) = 1024 entries. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
13899
13900/** @} */
13901
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
ContactPrivacy policyTerms of Use