VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 96104

Last change on this file was 96104, checked in by vboxsync, 3 years ago:

VMM/IEM: Implement [v]pmuludq instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 371.6 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96104 2022-08-08 09:10:25Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

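/* Editor's illustrative sketch (not part of the original file): a typical
   opcode handler dispatches to the worker above by passing the matching
   assembly-level helper, along these lines for pxor (helper name assumed):

       FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
       {
           IEMOP_MNEMONIC(pxor, "pxor Pq,Qq");
           return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
       }
*/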

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

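/* Editor's note: the difference from iemOpCommonMmx_FullFull_To_Full is the
   helper signature. Roughly (a sketch only; the authoritative typedefs live
   elsewhere in IEM), the plain type also receives the FPU state while the
   "Opt" type takes just the operands:

       typedef void (*sketch_PFNIEMAIMPLMEDIAF2U64)(PCX86FXSTATE pFpuState,
                                                    uint64_t *puDst, uint64_t const *puSrc);
       typedef void (*sketch_PFNIEMAIMPLMEDIAOPTF2U64)(uint64_t *puDst, uint64_t const *puSrc);

   which is why this worker invokes the helper with IEM_MC_CALL_VOID_AIMPL_2
   instead of IEM_MC_CALL_MMX_AIMPL_2. */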

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

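/* Editor's illustrative sketch: MMX instructions that arrived together with
   SSE (or AMD's MMX extensions) route here so they get the
   IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT() CPUID gate
   instead of the plain MMX one. A pavgw-style handler might look like this
   (helper name assumed):

       FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
       {
           IEMOP_MNEMONIC(pavgw, "pavgw Pq,Qq");
           return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pavgw_u64);
       }
*/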

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

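/* Editor's illustrative sketch: the fSupported argument lets this one worker
   serve MMX instructions that only exist when a later CPUID feature bit is
   present; pmuludq (added with SSE2, per the commit message of this revision)
   would plausibly be wired up roughly like so:

       FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
       {
           IEMOP_MNEMONIC(pmuludq, "pmuludq Pq,Qq");
           return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64,
                                 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
       }
*/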

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

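/* Editor's illustrative sketch (helper name assumed): the 66h-prefixed XMM
   forms dispatch here with a 128-bit helper, e.g.:

       FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
       {
           IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor Vx,Wx");
           return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
       }
*/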

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

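/* Editor's note: a ModR/M byte is laid out as mod(7:6) reg(5:3) rm(2:0), and
   group decoding keys off the reg field. Worked example: for the byte
   sequence 0f 00 d8, the ModR/M byte 0xd8 is 11 011 000b, so
   IEM_GET_MODRM_REG_8 yields 3 and the table above dispatches to
   iemOp_Grp6_ltr, which then sees register mode (mod=11b) with rm=0 (AX). */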

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

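/* Editor's note: for orientation, the architectural behaviour implemented by
   the deferred iemCImpl_xgetbv/iemCImpl_xsetbv workers is roughly (sketch):

       xgetbv:  EDX:EAX = XCR[ECX]     (#GP(0) for an unsupported ECX)
       xsetbv:  XCR[ECX] = EDX:EAX     (CPL 0 only, otherwise #GP(0))

   Both #UD when XSAVE/OSXSAVE support is absent, which is what the
   fXSaveRstor feature check above reflects at decode time. */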

/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

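/* Editor's note, worked decode example: 0f 01 d0 (xgetbv) has ModR/M byte
   0xd0 = 11 010 000b. mod=11b means register form, so the memory jump table
   is skipped; reg=2 with rm=0 then selects the iemOp_Grp7_xgetbv case in the
   switch above. */
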
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

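/* Editor's note: 0f 12 is really two instructions sharing an opcode byte. A
   register-form ModR/M (e.g. 0xc1 = 11 000 001b, i.e. xmm0, xmm1) decodes as
   movhlps and copies the source's high qword into the destination's low
   qword; any memory-form ModR/M decodes as movlps and loads the low qword
   from memory. In both cases the destination's high qword is left untouched. */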
1830
1831/**
1832 * @opcode 0x12
1833 * @opcodesub !11 mr/reg
1834 * @oppfx 0x66
1835 * @opcpuid sse2
1836 * @opgroup og_sse2_pcksclr_datamove
1837 * @opxcpttype 5
1838 * @optest op1=1 op2=2 -> op1=2
1839 * @optest op1=0 op2=-42 -> op1=-42
1840 */
1841FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1842{
1843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1844 if (IEM_IS_MODRM_MEM_MODE(bRm))
1845 {
1846 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1847
1848 IEM_MC_BEGIN(0, 2);
1849 IEM_MC_LOCAL(uint64_t, uSrc);
1850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1851
1852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1854 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1855 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1856
1857 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1858 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1859
1860 IEM_MC_ADVANCE_RIP();
1861 IEM_MC_END();
1862 return VINF_SUCCESS;
1863 }
1864
1865 /**
1866 * @opdone
1867 * @opmnemonic ud660f12m3
1868 * @opcode 0x12
1869 * @opcodesub 11 mr/reg
1870 * @oppfx 0x66
1871 * @opunused immediate
1872 * @opcpuid sse
1873 * @optest ->
1874 */
1875 return IEMOP_RAISE_INVALID_OPCODE();
1876}
1877
1878
1879/**
1880 * @opcode 0x12
1881 * @oppfx 0xf3
1882 * @opcpuid sse3
1883 * @opgroup og_sse3_pcksclr_datamove
1884 * @opxcpttype 4
1885 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1886 * op1=0x00000002000000020000000100000001
1887 */
1888FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1889{
1890 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1892 if (IEM_IS_MODRM_REG_MODE(bRm))
1893 {
1894 /*
1895 * Register, register.
1896 */
1897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1898 IEM_MC_BEGIN(2, 0);
1899 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1900 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1901
1902 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1903 IEM_MC_PREPARE_SSE_USAGE();
1904
1905 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1906 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1907 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1908
1909 IEM_MC_ADVANCE_RIP();
1910 IEM_MC_END();
1911 }
1912 else
1913 {
1914 /*
1915 * Register, memory.
1916 */
1917 IEM_MC_BEGIN(2, 2);
1918 IEM_MC_LOCAL(RTUINT128U, uSrc);
1919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1920 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1921 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1922
1923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1925 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1926 IEM_MC_PREPARE_SSE_USAGE();
1927
1928 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1929 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1930 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1931
1932 IEM_MC_ADVANCE_RIP();
1933 IEM_MC_END();
1934 }
1935 return VINF_SUCCESS;
1936}
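
/*
 * Illustration only, not emulator code: a minimal C sketch of the MOVSLDUP
 * data movement (the helper name is ours). Each even-indexed dword of the
 * source is duplicated into a pair of destination dwords, which is what the
 * @optest values above demonstrate.
 *
 *     static void movsldupSketch(uint32_t auDst[4], uint32_t const auSrc[4])
 *     {
 *         auDst[0] = auSrc[0];
 *         auDst[1] = auSrc[0];
 *         auDst[2] = auSrc[2];
 *         auDst[3] = auSrc[2];
 *     }
 */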
1937
1938
1939/**
1940 * @opcode 0x12
1941 * @oppfx 0xf2
1942 * @opcpuid sse3
1943 * @opgroup og_sse3_pcksclr_datamove
1944 * @opxcpttype 5
1945 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1946 * op1=0x22222222111111112222222211111111
1947 */
1948FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1949{
1950 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1952 if (IEM_IS_MODRM_REG_MODE(bRm))
1953 {
1954 /*
1955 * Register, register.
1956 */
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1958 IEM_MC_BEGIN(2, 0);
1959 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1960 IEM_MC_ARG(uint64_t, uSrc, 1);
1961
1962 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1963 IEM_MC_PREPARE_SSE_USAGE();
1964
1965 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1966 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1967 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1968
1969 IEM_MC_ADVANCE_RIP();
1970 IEM_MC_END();
1971 }
1972 else
1973 {
1974 /*
1975 * Register, memory.
1976 */
1977 IEM_MC_BEGIN(2, 2);
1978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1979 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1980 IEM_MC_ARG(uint64_t, uSrc, 1);
1981
1982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1984 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1985 IEM_MC_PREPARE_SSE_USAGE();
1986
1987 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1988 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1989 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1990
1991 IEM_MC_ADVANCE_RIP();
1992 IEM_MC_END();
1993 }
1994 return VINF_SUCCESS;
1995}
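
/*
 * Illustration only, not emulator code: a minimal C sketch of the MOVDDUP
 * data movement (the helper name is ours). The low qword of the source is
 * duplicated into both destination qwords, which is why the memory form
 * above only needs to fetch 64 bits.
 *
 *     static void movddupSketch(uint64_t auDst[2], uint64_t uSrc)
 *     {
 *         auDst[0] = uSrc;
 *         auDst[1] = uSrc;
 *     }
 */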
1996
1997
1998/**
1999 * @opcode 0x13
2000 * @opcodesub !11 mr/reg
2001 * @oppfx none
2002 * @opcpuid sse
2003 * @opgroup og_sse_simdfp_datamove
2004 * @opxcpttype 5
2005 * @optest op1=1 op2=2 -> op1=2
2006 * @optest op1=0 op2=-42 -> op1=-42
2007 */
2008FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2009{
2010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2011 if (IEM_IS_MODRM_MEM_MODE(bRm))
2012 {
2013 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2014
2015 IEM_MC_BEGIN(0, 2);
2016 IEM_MC_LOCAL(uint64_t, uSrc);
2017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2018
2019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2021 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2023
2024 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2025 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2026
2027 IEM_MC_ADVANCE_RIP();
2028 IEM_MC_END();
2029 return VINF_SUCCESS;
2030 }
2031
2032 /**
2033 * @opdone
2034 * @opmnemonic ud0f13m3
2035 * @opcode 0x13
2036 * @opcodesub 11 mr/reg
2037 * @oppfx none
2038 * @opunused immediate
2039 * @opcpuid sse
2040 * @optest ->
2041 */
2042 return IEMOP_RAISE_INVALID_OPCODE();
2043}
2044
2045
2046/**
2047 * @opcode 0x13
2048 * @opcodesub !11 mr/reg
2049 * @oppfx 0x66
2050 * @opcpuid sse2
2051 * @opgroup og_sse2_pcksclr_datamove
2052 * @opxcpttype 5
2053 * @optest op1=1 op2=2 -> op1=2
2054 * @optest op1=0 op2=-42 -> op1=-42
2055 */
2056FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2057{
2058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2059 if (IEM_IS_MODRM_MEM_MODE(bRm))
2060 {
2061 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2062 IEM_MC_BEGIN(0, 2);
2063 IEM_MC_LOCAL(uint64_t, uSrc);
2064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2065
2066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2069 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2070
2071 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2072 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2073
2074 IEM_MC_ADVANCE_RIP();
2075 IEM_MC_END();
2076 return VINF_SUCCESS;
2077 }
2078
2079 /**
2080 * @opdone
2081 * @opmnemonic ud660f13m3
2082 * @opcode 0x13
2083 * @opcodesub 11 mr/reg
2084 * @oppfx 0x66
2085 * @opunused immediate
2086 * @opcpuid sse
2087 * @optest ->
2088 */
2089 return IEMOP_RAISE_INVALID_OPCODE();
2090}
2091
2092
2093/**
2094 * @opmnemonic udf30f13
2095 * @opcode 0x13
2096 * @oppfx 0xf3
2097 * @opunused intel-modrm
2098 * @opcpuid sse
2099 * @optest ->
2100 * @opdone
2101 */
2102
2103/**
2104 * @opmnemonic udf20f13
2105 * @opcode 0x13
2106 * @oppfx 0xf2
2107 * @opunused intel-modrm
2108 * @opcpuid sse
2109 * @optest ->
2110 * @opdone
2111 */
2112
2113/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2114FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
2115/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2116FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
2117
2118/**
2119 * @opdone
2120 * @opmnemonic udf30f14
2121 * @opcode 0x14
2122 * @oppfx 0xf3
2123 * @opunused intel-modrm
2124 * @opcpuid sse
2125 * @optest ->
2126 * @opdone
2127 */
2128
2129/**
2130 * @opmnemonic udf20f14
2131 * @opcode 0x14
2132 * @oppfx 0xf2
2133 * @opunused intel-modrm
2134 * @opcpuid sse
2135 * @optest ->
2136 * @opdone
2137 */
2138
2139/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2140FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
2141/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2142FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
2143/* Opcode 0xf3 0x0f 0x15 - invalid */
2144/* Opcode 0xf2 0x0f 0x15 - invalid */
2145
2146/**
2147 * @opdone
2148 * @opmnemonic udf30f15
2149 * @opcode 0x15
2150 * @oppfx 0xf3
2151 * @opunused intel-modrm
2152 * @opcpuid sse
2153 * @optest ->
2154 * @opdone
2155 */
2156
2157/**
2158 * @opmnemonic udf20f15
2159 * @opcode 0x15
2160 * @oppfx 0xf2
2161 * @opunused intel-modrm
2162 * @opcpuid sse
2163 * @optest ->
2164 * @opdone
2165 */
2166
2167FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2168{
2169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2170 if (IEM_IS_MODRM_REG_MODE(bRm))
2171 {
2172 /**
2173 * @opcode 0x16
2174 * @opcodesub 11 mr/reg
2175 * @oppfx none
2176 * @opcpuid sse
2177 * @opgroup og_sse_simdfp_datamove
2178 * @opxcpttype 5
2179 * @optest op1=1 op2=2 -> op1=2
2180 * @optest op1=0 op2=-42 -> op1=-42
2181 */
2182 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2183
2184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2185 IEM_MC_BEGIN(0, 1);
2186 IEM_MC_LOCAL(uint64_t, uSrc);
2187
2188 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2189 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2190 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2191 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2192
2193 IEM_MC_ADVANCE_RIP();
2194 IEM_MC_END();
2195 }
2196 else
2197 {
2198 /**
2199 * @opdone
2200 * @opcode 0x16
2201 * @opcodesub !11 mr/reg
2202 * @oppfx none
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-42 -> op1=-42
2208 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2209 */
2210 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2211
2212 IEM_MC_BEGIN(0, 2);
2213 IEM_MC_LOCAL(uint64_t, uSrc);
2214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2215
2216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2219 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2220
2221 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2222 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2223
2224 IEM_MC_ADVANCE_RIP();
2225 IEM_MC_END();
2226 }
2227 return VINF_SUCCESS;
2228}
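
/*
 * Illustration only, not emulator code: a sketch of the destination update
 * shared by both encodings above (the helper name is ours). MOVLHPS takes
 * the low qword of the source register, MOVHPS takes a qword from memory;
 * either way only the high qword of the destination is written:
 *
 *     static void movlhpsSketch(uint64_t auDst[2], uint64_t uSrc)
 *     {
 *         auDst[1] = uSrc;   // auDst[0] is left untouched.
 *     }
 */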
2229
2230
2231/**
2232 * @opcode 0x16
2233 * @opcodesub !11 mr/reg
2234 * @oppfx 0x66
2235 * @opcpuid sse2
2236 * @opgroup og_sse2_pcksclr_datamove
2237 * @opxcpttype 5
2238 * @optest op1=1 op2=2 -> op1=2
2239 * @optest op1=0 op2=-42 -> op1=-42
2240 */
2241FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2242{
2243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2244 if (IEM_IS_MODRM_MEM_MODE(bRm))
2245 {
2246 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(uint64_t, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2253 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2255
2256 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2257 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2258
2259 IEM_MC_ADVANCE_RIP();
2260 IEM_MC_END();
2261 return VINF_SUCCESS;
2262 }
2263
2264 /**
2265 * @opdone
2266 * @opmnemonic ud660f16m3
2267 * @opcode 0x16
2268 * @opcodesub 11 mr/reg
2269 * @oppfx 0x66
2270 * @opunused immediate
2271 * @opcpuid sse
2272 * @optest ->
2273 */
2274 return IEMOP_RAISE_INVALID_OPCODE();
2275}
2276
2277
2278/**
2279 * @opcode 0x16
2280 * @oppfx 0xf3
2281 * @opcpuid sse3
2282 * @opgroup og_sse3_pcksclr_datamove
2283 * @opxcpttype 4
2284 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2285 * op1=0x00000002000000020000000100000001
2286 */
2287FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2288{
2289 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2291 if (IEM_IS_MODRM_REG_MODE(bRm))
2292 {
2293 /*
2294 * Register, register.
2295 */
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_BEGIN(2, 0);
2298 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2299 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2300
2301 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2302 IEM_MC_PREPARE_SSE_USAGE();
2303
2304 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2305 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2306 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2307
2308 IEM_MC_ADVANCE_RIP();
2309 IEM_MC_END();
2310 }
2311 else
2312 {
2313 /*
2314 * Register, memory.
2315 */
2316 IEM_MC_BEGIN(2, 2);
2317 IEM_MC_LOCAL(RTUINT128U, uSrc);
2318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2320 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2321
2322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2324 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2325 IEM_MC_PREPARE_SSE_USAGE();
2326
2327 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2328 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2329 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2330
2331 IEM_MC_ADVANCE_RIP();
2332 IEM_MC_END();
2333 }
2334 return VINF_SUCCESS;
2335}
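
/*
 * Illustration only, not emulator code: a minimal C sketch of the MOVSHDUP
 * data movement (the helper name is ours), the mirror image of MOVSLDUP:
 * the odd-indexed source dwords are the ones duplicated.
 *
 *     static void movshdupSketch(uint32_t auDst[4], uint32_t const auSrc[4])
 *     {
 *         auDst[0] = auSrc[1];
 *         auDst[1] = auSrc[1];
 *         auDst[2] = auSrc[3];
 *         auDst[3] = auSrc[3];
 *     }
 */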
2336
2337/**
2338 * @opdone
2339 * @opmnemonic udf30f16
2340 * @opcode 0x16
2341 * @oppfx 0xf2
2342 * @opunused intel-modrm
2343 * @opcpuid sse
2344 * @optest ->
2345 * @opdone
2346 */
2347
2348
2349/**
2350 * @opcode 0x17
2351 * @opcodesub !11 mr/reg
2352 * @oppfx none
2353 * @opcpuid sse
2354 * @opgroup og_sse_simdfp_datamove
2355 * @opxcpttype 5
2356 * @optest op1=1 op2=2 -> op1=2
2357 * @optest op1=0 op2=-42 -> op1=-42
2358 */
2359FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2360{
2361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2362 if (IEM_IS_MODRM_MEM_MODE(bRm))
2363 {
2364 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2365
2366 IEM_MC_BEGIN(0, 2);
2367 IEM_MC_LOCAL(uint64_t, uSrc);
2368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2369
2370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2374
2375 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2376 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2377
2378 IEM_MC_ADVANCE_RIP();
2379 IEM_MC_END();
2380 return VINF_SUCCESS;
2381 }
2382
2383 /**
2384 * @opdone
2385 * @opmnemonic ud0f17m3
2386 * @opcode 0x17
2387 * @opcodesub 11 mr/reg
2388 * @oppfx none
2389 * @opunused immediate
2390 * @opcpuid sse
2391 * @optest ->
2392 */
2393 return IEMOP_RAISE_INVALID_OPCODE();
2394}
2395
2396
2397/**
2398 * @opcode 0x17
2399 * @opcodesub !11 mr/reg
2400 * @oppfx 0x66
2401 * @opcpuid sse2
2402 * @opgroup og_sse2_pcksclr_datamove
2403 * @opxcpttype 5
2404 * @optest op1=1 op2=2 -> op1=2
2405 * @optest op1=0 op2=-42 -> op1=-42
2406 */
2407FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2408{
2409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2410 if (IEM_IS_MODRM_MEM_MODE(bRm))
2411 {
2412 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2413
2414 IEM_MC_BEGIN(0, 2);
2415 IEM_MC_LOCAL(uint64_t, uSrc);
2416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2417
2418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2420 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2421 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2422
2423 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2424 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2425
2426 IEM_MC_ADVANCE_RIP();
2427 IEM_MC_END();
2428 return VINF_SUCCESS;
2429 }
2430
2431 /**
2432 * @opdone
2433 * @opmnemonic ud660f17m3
2434 * @opcode 0x17
2435 * @opcodesub 11 mr/reg
2436 * @oppfx 0x66
2437 * @opunused immediate
2438 * @opcpuid sse
2439 * @optest ->
2440 */
2441 return IEMOP_RAISE_INVALID_OPCODE();
2442}
2443
2444
2445/**
2446 * @opdone
2447 * @opmnemonic udf30f17
2448 * @opcode 0x17
2449 * @oppfx 0xf3
2450 * @opunused intel-modrm
2451 * @opcpuid sse
2452 * @optest ->
2453 * @opdone
2454 */
2455
2456/**
2457 * @opmnemonic udf20f17
2458 * @opcode 0x17
2459 * @oppfx 0xf2
2460 * @opunused intel-modrm
2461 * @opcpuid sse
2462 * @optest ->
2463 * @opdone
2464 */
2465
2466
2467/** Opcode 0x0f 0x18. */
2468FNIEMOP_DEF(iemOp_prefetch_Grp16)
2469{
2470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2471 if (IEM_IS_MODRM_MEM_MODE(bRm))
2472 {
2473 switch (IEM_GET_MODRM_REG_8(bRm))
2474 {
2475 case 4: /* Aliased to /0 for the time being according to AMD. */
2476 case 5: /* Aliased to /0 for the time being according to AMD. */
2477 case 6: /* Aliased to /0 for the time being according to AMD. */
2478 case 7: /* Aliased to /0 for the time being according to AMD. */
2479 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2480 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2481 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2482 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2484 }
2485
2486 IEM_MC_BEGIN(0, 1);
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2490 /* Currently a NOP. */
2491 NOREF(GCPtrEffSrc);
2492 IEM_MC_ADVANCE_RIP();
2493 IEM_MC_END();
2494 return VINF_SUCCESS;
2495 }
2496
2497 return IEMOP_RAISE_INVALID_OPCODE();
2498}
2499
2500
2501/** Opcode 0x0f 0x19..0x1f. */
2502FNIEMOP_DEF(iemOp_nop_Ev)
2503{
2504 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2506 if (IEM_IS_MODRM_REG_MODE(bRm))
2507 {
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(0, 0);
2510 IEM_MC_ADVANCE_RIP();
2511 IEM_MC_END();
2512 }
2513 else
2514 {
2515 IEM_MC_BEGIN(0, 1);
2516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2519 /* Currently a NOP. */
2520 NOREF(GCPtrEffSrc);
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 return VINF_SUCCESS;
2525}
2526
2527
2528/** Opcode 0x0f 0x20. */
2529FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2530{
2531 /* The mod field is ignored, as are operand size overrides. */
2532 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2533 IEMOP_HLP_MIN_386();
2534 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2535 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2536 else
2537 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2538
2539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2540 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2541 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2542 {
2543 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2544 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2545 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2546 iCrReg |= 8;
2547 }
2548 switch (iCrReg)
2549 {
2550 case 0: case 2: case 3: case 4: case 8:
2551 break;
2552 default:
2553 return IEMOP_RAISE_INVALID_OPCODE();
2554 }
2555 IEMOP_HLP_DONE_DECODING();
2556
2557 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
2558}
2559
2560
2561/** Opcode 0x0f 0x21. */
2562FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2563{
2564 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2565 IEMOP_HLP_MIN_386();
2566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2568 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2569 return IEMOP_RAISE_INVALID_OPCODE();
2570 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2571 IEM_GET_MODRM_RM(pVCpu, bRm),
2572 IEM_GET_MODRM_REG_8(bRm));
2573}
2574
2575
2576/** Opcode 0x0f 0x22. */
2577FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2578{
2579 /* The mod field is ignored, as are operand size overrides. */
2580 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2581 IEMOP_HLP_MIN_386();
2582 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2583 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2584 else
2585 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2586
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2589 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2590 {
2591 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2592 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2593 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2594 iCrReg |= 8;
2595 }
2596 switch (iCrReg)
2597 {
2598 case 0: case 2: case 3: case 4: case 8:
2599 break;
2600 default:
2601 return IEMOP_RAISE_INVALID_OPCODE();
2602 }
2603 IEMOP_HLP_DONE_DECODING();
2604
2605 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
2606}
2607
2608
2609/** Opcode 0x0f 0x23. */
2610FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2611{
2612 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2613 IEMOP_HLP_MIN_386();
2614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2617 return IEMOP_RAISE_INVALID_OPCODE();
2618 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2619 IEM_GET_MODRM_REG_8(bRm),
2620 IEM_GET_MODRM_RM(pVCpu, bRm));
2621}
2622
2623
2624/** Opcode 0x0f 0x24. */
2625FNIEMOP_DEF(iemOp_mov_Rd_Td)
2626{
2627 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2628 IEMOP_HLP_MIN_386();
2629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2631 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2632 return IEMOP_RAISE_INVALID_OPCODE();
2633 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2634 IEM_GET_MODRM_RM(pVCpu, bRm),
2635 IEM_GET_MODRM_REG_8(bRm));
2636}
2637
2638
2639/** Opcode 0x0f 0x26. */
2640FNIEMOP_DEF(iemOp_mov_Td_Rd)
2641{
2642 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2643 IEMOP_HLP_MIN_386();
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2646 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2647 return IEMOP_RAISE_INVALID_OPCODE();
2648 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2649 IEM_GET_MODRM_REG_8(bRm),
2650 IEM_GET_MODRM_RM(pVCpu, bRm));
2651}
2652
2653
2654/**
2655 * @opcode 0x28
2656 * @oppfx none
2657 * @opcpuid sse
2658 * @opgroup og_sse_simdfp_datamove
2659 * @opxcpttype 1
2660 * @optest op1=1 op2=2 -> op1=2
2661 * @optest op1=0 op2=-42 -> op1=-42
2662 */
2663FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2664{
2665 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2667 if (IEM_IS_MODRM_REG_MODE(bRm))
2668 {
2669 /*
2670 * Register, register.
2671 */
2672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2673 IEM_MC_BEGIN(0, 0);
2674 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2675 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2676 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2677 IEM_GET_MODRM_RM(pVCpu, bRm));
2678 IEM_MC_ADVANCE_RIP();
2679 IEM_MC_END();
2680 }
2681 else
2682 {
2683 /*
2684 * Register, memory.
2685 */
2686 IEM_MC_BEGIN(0, 2);
2687 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2689
2690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2693 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2694
2695 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2696 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2697
2698 IEM_MC_ADVANCE_RIP();
2699 IEM_MC_END();
2700 }
2701 return VINF_SUCCESS;
2702}
2703
2704/**
2705 * @opcode 0x28
2706 * @oppfx 66
2707 * @opcpuid sse2
2708 * @opgroup og_sse2_pcksclr_datamove
2709 * @opxcpttype 1
2710 * @optest op1=1 op2=2 -> op1=2
2711 * @optest op1=0 op2=-42 -> op1=-42
2712 */
2713FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2714{
2715 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2717 if (IEM_IS_MODRM_REG_MODE(bRm))
2718 {
2719 /*
2720 * Register, register.
2721 */
2722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2723 IEM_MC_BEGIN(0, 0);
2724 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2725 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2726 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2727 IEM_GET_MODRM_RM(pVCpu, bRm));
2728 IEM_MC_ADVANCE_RIP();
2729 IEM_MC_END();
2730 }
2731 else
2732 {
2733 /*
2734 * Register, memory.
2735 */
2736 IEM_MC_BEGIN(0, 2);
2737 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2739
2740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2742 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2743 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2744
2745 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2746 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2747
2748 IEM_MC_ADVANCE_RIP();
2749 IEM_MC_END();
2750 }
2751 return VINF_SUCCESS;
2752}
2753
2754/* Opcode 0xf3 0x0f 0x28 - invalid */
2755/* Opcode 0xf2 0x0f 0x28 - invalid */
2756
2757/**
2758 * @opcode 0x29
2759 * @oppfx none
2760 * @opcpuid sse
2761 * @opgroup og_sse_simdfp_datamove
2762 * @opxcpttype 1
2763 * @optest op1=1 op2=2 -> op1=2
2764 * @optest op1=0 op2=-42 -> op1=-42
2765 */
2766FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2767{
2768 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2770 if (IEM_IS_MODRM_REG_MODE(bRm))
2771 {
2772 /*
2773 * Register, register.
2774 */
2775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2776 IEM_MC_BEGIN(0, 0);
2777 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2779 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2780 IEM_GET_MODRM_REG(pVCpu, bRm));
2781 IEM_MC_ADVANCE_RIP();
2782 IEM_MC_END();
2783 }
2784 else
2785 {
2786 /*
2787 * Memory, register.
2788 */
2789 IEM_MC_BEGIN(0, 2);
2790 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2792
2793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2795 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2796 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2797
2798 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2799 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2800
2801 IEM_MC_ADVANCE_RIP();
2802 IEM_MC_END();
2803 }
2804 return VINF_SUCCESS;
2805}
2806
2807/**
2808 * @opcode 0x29
2809 * @oppfx 66
2810 * @opcpuid sse2
2811 * @opgroup og_sse2_pcksclr_datamove
2812 * @opxcpttype 1
2813 * @optest op1=1 op2=2 -> op1=2
2814 * @optest op1=0 op2=-42 -> op1=-42
2815 */
2816FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2817{
2818 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2820 if (IEM_IS_MODRM_REG_MODE(bRm))
2821 {
2822 /*
2823 * Register, register.
2824 */
2825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2826 IEM_MC_BEGIN(0, 0);
2827 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2828 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2829 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2830 IEM_GET_MODRM_REG(pVCpu, bRm));
2831 IEM_MC_ADVANCE_RIP();
2832 IEM_MC_END();
2833 }
2834 else
2835 {
2836 /*
2837 * Memory, register.
2838 */
2839 IEM_MC_BEGIN(0, 2);
2840 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2842
2843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2845 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2846 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2847
2848 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2849 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2850
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 return VINF_SUCCESS;
2855}
2856
2857/* Opcode 0xf3 0x0f 0x29 - invalid */
2858/* Opcode 0xf2 0x0f 0x29 - invalid */
2859
2860
2861/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2862FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2863/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2864FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2865/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2866FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2867/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2868FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2869
2870
2871/**
2872 * @opcode 0x2b
2873 * @opcodesub !11 mr/reg
2874 * @oppfx none
2875 * @opcpuid sse
2876 * @opgroup og_sse1_cachect
2877 * @opxcpttype 1
2878 * @optest op1=1 op2=2 -> op1=2
2879 * @optest op1=0 op2=-42 -> op1=-42
2880 */
2881FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2882{
2883 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2885 if (IEM_IS_MODRM_MEM_MODE(bRm))
2886 {
2887 /*
2888 * Memory, register.
2889 */
2890 IEM_MC_BEGIN(0, 2);
2891 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2893
2894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2897 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2898
2899 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2900 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2901
2902 IEM_MC_ADVANCE_RIP();
2903 IEM_MC_END();
2904 }
2905 /* The register, register encoding is invalid. */
2906 else
2907 return IEMOP_RAISE_INVALID_OPCODE();
2908 return VINF_SUCCESS;
2909}
2910
2911/**
2912 * @opcode 0x2b
2913 * @opcodesub !11 mr/reg
2914 * @oppfx 0x66
2915 * @opcpuid sse2
2916 * @opgroup og_sse2_cachect
2917 * @opxcpttype 1
2918 * @optest op1=1 op2=2 -> op1=2
2919 * @optest op1=0 op2=-42 -> op1=-42
2920 */
2921FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2922{
2923 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if (IEM_IS_MODRM_MEM_MODE(bRm))
2926 {
2927 /*
2928 * Memory, register.
2929 */
2930 IEM_MC_BEGIN(0, 2);
2931 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2933
2934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2936 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2937 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2938
2939 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2940 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2941
2942 IEM_MC_ADVANCE_RIP();
2943 IEM_MC_END();
2944 }
2945 /* The register, register encoding is invalid. */
2946 else
2947 return IEMOP_RAISE_INVALID_OPCODE();
2948 return VINF_SUCCESS;
2949}
2950/* Opcode 0xf3 0x0f 0x2b - invalid */
2951/* Opcode 0xf2 0x0f 0x2b - invalid */
2952
2953
2954/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2955FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2956/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2957FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2958/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2959FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2960/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2961FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2962
2963/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2964FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2965/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2966FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2967/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2968FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2969/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2970FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2971
2972/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2973FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2974/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2975FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2976/* Opcode 0xf3 0x0f 0x2e - invalid */
2977/* Opcode 0xf2 0x0f 0x2e - invalid */
2978
2979/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2980FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2981/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2982FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2983/* Opcode 0xf3 0x0f 0x2f - invalid */
2984/* Opcode 0xf2 0x0f 0x2f - invalid */
2985
2986/** Opcode 0x0f 0x30. */
2987FNIEMOP_DEF(iemOp_wrmsr)
2988{
2989 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2992}
2993
2994
2995/** Opcode 0x0f 0x31. */
2996FNIEMOP_DEF(iemOp_rdtsc)
2997{
2998 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3000 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
3001}
3002
3003
3004/** Opcode 0x0f 0x32. */
3005FNIEMOP_DEF(iemOp_rdmsr)
3006{
3007 IEMOP_MNEMONIC(rdmsr, "rdmsr");
3008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3009 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
3010}
3011
3012
3013/** Opcode 0x0f 0x33. */
3014FNIEMOP_DEF(iemOp_rdpmc)
3015{
3016 IEMOP_MNEMONIC(rdpmc, "rdpmc");
3017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3018 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
3019}
3020
3021
3022/** Opcode 0x0f 0x34. */
3023FNIEMOP_DEF(iemOp_sysenter)
3024{
3025 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3027 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
3028}
3029
3030/** Opcode 0x0f 0x35. */
3031FNIEMOP_DEF(iemOp_sysexit)
3032{
3033 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3035 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
3036}
3037
3038/** Opcode 0x0f 0x37. */
3039FNIEMOP_STUB(iemOp_getsec);
3040
3041
3042/** Opcode 0x0f 0x38. */
3043FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
3044{
3045#ifdef IEM_WITH_THREE_0F_38
3046 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3047 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3048#else
3049 IEMOP_BITCH_ABOUT_STUB();
3050 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3051#endif
3052}
3053
3054
3055/** Opcode 0x0f 0x3a. */
3056FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
3057{
3058#ifdef IEM_WITH_THREE_0F_3A
3059 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3060 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3061#else
3062 IEMOP_BITCH_ABOUT_STUB();
3063 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3064#endif
3065}
3066
3067
3068/**
3069 * Implements a conditional move.
3070 *
3071 * Wish there were an obvious way to do this that would let us share code
3072 * and reduce bloat.
3073 *
3074 * @param a_Cnd The conditional "microcode" operation.
3075 */
3076#define CMOV_X(a_Cnd) \
3077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3078 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3079 { \
3080 switch (pVCpu->iem.s.enmEffOpSize) \
3081 { \
3082 case IEMMODE_16BIT: \
3083 IEM_MC_BEGIN(0, 1); \
3084 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3085 a_Cnd { \
3086 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3087 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3088 } IEM_MC_ENDIF(); \
3089 IEM_MC_ADVANCE_RIP(); \
3090 IEM_MC_END(); \
3091 return VINF_SUCCESS; \
3092 \
3093 case IEMMODE_32BIT: \
3094 IEM_MC_BEGIN(0, 1); \
3095 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3096 a_Cnd { \
3097 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3098 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3099 } IEM_MC_ELSE() { \
3100 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3101 } IEM_MC_ENDIF(); \
3102 IEM_MC_ADVANCE_RIP(); \
3103 IEM_MC_END(); \
3104 return VINF_SUCCESS; \
3105 \
3106 case IEMMODE_64BIT: \
3107 IEM_MC_BEGIN(0, 1); \
3108 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3109 a_Cnd { \
3110 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3111 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3112 } IEM_MC_ENDIF(); \
3113 IEM_MC_ADVANCE_RIP(); \
3114 IEM_MC_END(); \
3115 return VINF_SUCCESS; \
3116 \
3117 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3118 } \
3119 } \
3120 else \
3121 { \
3122 switch (pVCpu->iem.s.enmEffOpSize) \
3123 { \
3124 case IEMMODE_16BIT: \
3125 IEM_MC_BEGIN(0, 2); \
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3127 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3129 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3130 a_Cnd { \
3131 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3132 } IEM_MC_ENDIF(); \
3133 IEM_MC_ADVANCE_RIP(); \
3134 IEM_MC_END(); \
3135 return VINF_SUCCESS; \
3136 \
3137 case IEMMODE_32BIT: \
3138 IEM_MC_BEGIN(0, 2); \
3139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3140 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3142 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3143 a_Cnd { \
3144 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3145 } IEM_MC_ELSE() { \
3146 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3147 } IEM_MC_ENDIF(); \
3148 IEM_MC_ADVANCE_RIP(); \
3149 IEM_MC_END(); \
3150 return VINF_SUCCESS; \
3151 \
3152 case IEMMODE_64BIT: \
3153 IEM_MC_BEGIN(0, 2); \
3154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3155 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3157 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3158 a_Cnd { \
3159 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3160 } IEM_MC_ENDIF(); \
3161 IEM_MC_ADVANCE_RIP(); \
3162 IEM_MC_END(); \
3163 return VINF_SUCCESS; \
3164 \
3165 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3166 } \
3167 } do {} while (0)
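
/*
 * Illustration only, not emulator code: why the 32-bit paths above carry an
 * IEM_MC_ELSE() branch while the 16-bit and 64-bit ones do not. In 64-bit
 * mode a 32-bit CMOVcc zero-extends the destination register even when the
 * move is not performed (the helper name is ours):
 *
 *     static uint64_t cmov32Sketch(bool fCnd, uint64_t uDstOld, uint32_t uSrc)
 *     {
 *         return fCnd ? uSrc : (uint32_t)uDstOld; // high half zeroed either way
 *     }
 */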
3168
3169
3170
3171/** Opcode 0x0f 0x40. */
3172FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3173{
3174 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3175 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3176}
3177
3178
3179/** Opcode 0x0f 0x41. */
3180FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3181{
3182 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3183 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3184}
3185
3186
3187/** Opcode 0x0f 0x42. */
3188FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3189{
3190 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3191 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3192}
3193
3194
3195/** Opcode 0x0f 0x43. */
3196FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3197{
3198 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3199 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3200}
3201
3202
3203/** Opcode 0x0f 0x44. */
3204FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3205{
3206 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3207 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3208}
3209
3210
3211/** Opcode 0x0f 0x45. */
3212FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3213{
3214 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3215 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3216}
3217
3218
3219/** Opcode 0x0f 0x46. */
3220FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3221{
3222 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3223 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3224}
3225
3226
3227/** Opcode 0x0f 0x47. */
3228FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3229{
3230 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3231 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3232}
3233
3234
3235/** Opcode 0x0f 0x48. */
3236FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3237{
3238 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3239 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3240}
3241
3242
3243/** Opcode 0x0f 0x49. */
3244FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3245{
3246 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3247 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3248}
3249
3250
3251/** Opcode 0x0f 0x4a. */
3252FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3253{
3254 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3255 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3256}
3257
3258
3259/** Opcode 0x0f 0x4b. */
3260FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3261{
3262 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3263 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3264}
3265
3266
3267/** Opcode 0x0f 0x4c. */
3268FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3269{
3270 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3271 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3272}
3273
3274
3275/** Opcode 0x0f 0x4d. */
3276FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3277{
3278 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3279 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3280}
3281
3282
3283/** Opcode 0x0f 0x4e. */
3284FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3285{
3286 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3287 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3288}
3289
3290
3291/** Opcode 0x0f 0x4f. */
3292FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3293{
3294 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3295 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3296}
3297
3298#undef CMOV_X
3299
3300/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3301FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3302/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3303FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3304/* Opcode 0xf3 0x0f 0x50 - invalid */
3305/* Opcode 0xf2 0x0f 0x50 - invalid */
3306
3307/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3308FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3309/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3310FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3311/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3312FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3313/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3314FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3315
3316/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3317FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3318/* Opcode 0x66 0x0f 0x52 - invalid */
3319/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3320FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3321/* Opcode 0xf2 0x0f 0x52 - invalid */
3322
3323/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3324FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3325/* Opcode 0x66 0x0f 0x53 - invalid */
3326/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3327FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3328/* Opcode 0xf2 0x0f 0x53 - invalid */
3329
3330
3331/** Opcode 0x0f 0x54 - andps Vps, Wps */
3332FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3333{
3334 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3335 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3336}
3337
3338
3339/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3340FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3341{
3342 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3343 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3344}
3345
3346
3347/* Opcode 0xf3 0x0f 0x54 - invalid */
3348/* Opcode 0xf2 0x0f 0x54 - invalid */
3349
3350
3351/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3352FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3353{
3354 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3355 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3356}
3357
3358
3359/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3360FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3361{
3362 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3363 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3364}
3365
3366
3367/* Opcode 0xf3 0x0f 0x55 - invalid */
3368/* Opcode 0xf2 0x0f 0x55 - invalid */
3369
3370
3371/** Opcode 0x0f 0x56 - orps Vps, Wps */
3372FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3373{
3374 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3375 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3376}
3377
3378
3379/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3380FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3381{
3382 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3383 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3384}
3385
3386
3387/* Opcode 0xf3 0x0f 0x56 - invalid */
3388/* Opcode 0xf2 0x0f 0x56 - invalid */
3389
3390
3391/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3392FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3393{
3394 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3395 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3396}
3397
3398
3399/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3400FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3401{
3402 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3403 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3404}
3405
3406
3407/* Opcode 0xf3 0x0f 0x57 - invalid */
3408/* Opcode 0xf2 0x0f 0x57 - invalid */
3409
3410/** Opcode 0x0f 0x58 - addps Vps, Wps */
3411FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3412/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3413FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3414/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3415FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3416/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3417FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3418
3419/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3420FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3421/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3422FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3423/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3424FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3425/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3426FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3427
3428/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3429FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3430/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3431FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3432/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3433FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3434/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3435FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3436
3437/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3438FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3439/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3440FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3441/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3442FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3443/* Opcode 0xf2 0x0f 0x5b - invalid */
3444
3445/** Opcode 0x0f 0x5c - subps Vps, Wps */
3446FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3447/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3448FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3449/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3450FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3451/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3452FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3453
3454/** Opcode 0x0f 0x5d - minps Vps, Wps */
3455FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3456/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3457FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3458/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3459FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3460/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3461FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3462
3463/** Opcode 0x0f 0x5e - divps Vps, Wps */
3464FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3465/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3466FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3467/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3468FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3469/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3470FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3471
3472/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3473FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3474/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3475FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3476/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3477FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3478/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3479FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3480
3481/**
3482 * Common worker for MMX instructions on the form:
3483 * pxxxx mm1, mm2/mem32
3484 *
3485 * The 2nd operand is the first half of a register, which in the memory case
3486 * means a 32-bit memory access.
3487 */
3488FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3489{
3490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3491 if (IEM_IS_MODRM_REG_MODE(bRm))
3492 {
3493 /*
3494 * Register, register.
3495 */
3496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3497 IEM_MC_BEGIN(2, 0);
3498 IEM_MC_ARG(uint64_t *, puDst, 0);
3499 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3500 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3501 IEM_MC_PREPARE_FPU_USAGE();
3502 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3503 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3504 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3505 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3506 IEM_MC_FPU_TO_MMX_MODE();
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 }
3510 else
3511 {
3512 /*
3513 * Register, memory.
3514 */
3515 IEM_MC_BEGIN(2, 2);
3516 IEM_MC_ARG(uint64_t *, puDst, 0);
3517 IEM_MC_LOCAL(uint64_t, uSrc);
3518 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3520
3521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3523 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3524 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3525
3526 IEM_MC_PREPARE_FPU_USAGE();
3527 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3528 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3529 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3530 IEM_MC_FPU_TO_MMX_MODE();
3531
3532 IEM_MC_ADVANCE_RIP();
3533 IEM_MC_END();
3534 }
3535 return VINF_SUCCESS;
3536}
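
/*
 * Illustration only, not emulator code: the byte variant of the low-half
 * interleave this worker is used for (the helper name is ours). Only the
 * low four source bytes are consumed, hence the 32-bit memory fetch; the
 * 128-bit SSE2 sibling below interleaves the low eight bytes of each
 * operand instead.
 *
 *     static void punpcklbwU64Sketch(uint8_t abDst[8], uint8_t const abSrc[8])
 *     {
 *         uint8_t const abLo[4] = { abDst[0], abDst[1], abDst[2], abDst[3] };
 *         abDst[0] = abLo[0]; abDst[1] = abSrc[0];
 *         abDst[2] = abLo[1]; abDst[3] = abSrc[1];
 *         abDst[4] = abLo[2]; abDst[5] = abSrc[2];
 *         abDst[6] = abLo[3]; abDst[7] = abSrc[3];
 *     }
 */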
3537
3538
3539/**
3540 * Common worker for SSE2 instructions on the form:
3541 * pxxxx xmm1, xmm2/mem128
3542 *
3543 * The 2nd operand is the first half of a register, which in the memory case
3544 * means a 128-bit aligned access where the CPU may read all 128 bits or just the low 64.
3545 *
3546 * Exceptions type 4.
3547 */
3548FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3549{
3550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3551 if (IEM_IS_MODRM_REG_MODE(bRm))
3552 {
3553 /*
3554 * Register, register.
3555 */
3556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3557 IEM_MC_BEGIN(2, 0);
3558 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3559 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3562 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3563 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3564 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3565 IEM_MC_ADVANCE_RIP();
3566 IEM_MC_END();
3567 }
3568 else
3569 {
3570 /*
3571 * Register, memory.
3572 */
3573 IEM_MC_BEGIN(2, 2);
3574 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3575 IEM_MC_LOCAL(RTUINT128U, uSrc);
3576 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3578
3579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3582 /** @todo Most CPUs probably only read the low qword. We read everything to
3583 * make sure we apply segmentation and alignment checks correctly.
3584 * When we have time, it would be interesting to explore what real
3585 * CPUs actually does and whether it will do a TLB load for the high
3586 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
3587 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3588
3589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3590 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3591 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3592
3593 IEM_MC_ADVANCE_RIP();
3594 IEM_MC_END();
3595 }
3596 return VINF_SUCCESS;
3597}
3598
3599
3600/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3601FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3602{
3603 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3604 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
3605}
3606
3607
3608/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3609FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3610{
3611 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3612 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
3613}
3614
3615
3616/* Opcode 0xf3 0x0f 0x60 - invalid */
3617
3618
3619/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3620FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3621{
3622 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3623 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3624 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
3625}
3626
3627
3628/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3629FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3630{
3631 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3632 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
3633}
3634
3635
3636/* Opcode 0xf3 0x0f 0x61 - invalid */
3637
3638
3639/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3640FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3641{
3642 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3643 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
3644}
3645
3646
3647/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3648FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3649{
3650 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3651 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
3652}
3653
3654
3655/* Opcode 0xf3 0x0f 0x62 - invalid */
3656
3657
3658
3659/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3660FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
3661{
3662 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3663 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
3664}
3665
3666
3667/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3668FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
3669{
3670 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3671 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
3672}
3673
3674
3675/* Opcode 0xf3 0x0f 0x63 - invalid */
3676
3677
3678/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3679FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
3680{
3681 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3682 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
3683}
3684
3685
3686/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3687FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
3688{
3689 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3690 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
3691}
3692
3693
3694/* Opcode 0xf3 0x0f 0x64 - invalid */
3695
3696
3697/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3698FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
3699{
3700 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3701 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
3702}
3703
3704
3705/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3706FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
3707{
3708 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3709 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
3710}
3711
3712
3713/* Opcode 0xf3 0x0f 0x65 - invalid */
3714
3715
3716/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3717FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
3718{
3719 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3720 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
3721}
3722
3723
3724/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3725FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
3726{
3727 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3728 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
3729}
3730
3731
3732/* Opcode 0xf3 0x0f 0x66 - invalid */
3733
3734
3735/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3736FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
3737{
3738 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3739 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
3740}
3741
3742
3743/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3744FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
3745{
3746 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3747 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
3748}
3749
3750
3751/* Opcode 0xf3 0x0f 0x67 - invalid */
3752
3753
3754/**
3755 * Common worker for MMX instructions on the form:
3756 * pxxxx mm1, mm2/mem64
3757 *
3758 * The 2nd operand is the second half of a register, which in the memory case
3759 * means a 64-bit memory access for MMX.
3760 */
3761FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3762{
3763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3764 if (IEM_IS_MODRM_REG_MODE(bRm))
3765 {
3766 /*
3767 * Register, register.
3768 */
3769 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3770 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3772 IEM_MC_BEGIN(2, 0);
3773 IEM_MC_ARG(uint64_t *, puDst, 0);
3774 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3775 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3776 IEM_MC_PREPARE_FPU_USAGE();
3777 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3778 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3779 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3780 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3781 IEM_MC_FPU_TO_MMX_MODE();
3782 IEM_MC_ADVANCE_RIP();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 /*
3788 * Register, memory.
3789 */
3790 IEM_MC_BEGIN(2, 2);
3791 IEM_MC_ARG(uint64_t *, puDst, 0);
3792 IEM_MC_LOCAL(uint64_t, uSrc);
3793 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3795
3796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3798 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3799 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
3800
3801 IEM_MC_PREPARE_FPU_USAGE();
3802 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3803 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3804 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3805 IEM_MC_FPU_TO_MMX_MODE();
3806
3807 IEM_MC_ADVANCE_RIP();
3808 IEM_MC_END();
3809 }
3810 return VINF_SUCCESS;
3811}
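
/*
 * A minimal sketch of what "HighHigh" means in the 64-bit case, using
 * punpckhbw as the example: the upper four bytes of destination and source
 * are interleaved into the result. Illustrative only; the real work is
 * done by the iemAImpl_punpckh*_u64 workers passed in as pfnU64.
 */
static uint64_t sketchPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);     /* dst bytes 4..7 -> even result bytes */
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8); /* src bytes 4..7 -> odd result bytes */
    }
    return uResult;
}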
3812
3813
3814/**
3815 * Common worker for SSE2 instructions on the form:
3816 * pxxxx xmm1, xmm2/mem128
3817 *
3818 * The 2nd operand is the second half of a register, which for SSE means a 128-bit
3819 * aligned access that may read the full 128 bits or only the upper 64 bits.
3820 *
3821 * Exceptions type 4.
3822 */
3823FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3824{
3825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3826 if (IEM_IS_MODRM_REG_MODE(bRm))
3827 {
3828 /*
3829 * Register, register.
3830 */
3831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3832 IEM_MC_BEGIN(2, 0);
3833 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3834 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3835 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3836 IEM_MC_PREPARE_SSE_USAGE();
3837 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3838 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3839 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3840 IEM_MC_ADVANCE_RIP();
3841 IEM_MC_END();
3842 }
3843 else
3844 {
3845 /*
3846 * Register, memory.
3847 */
3848 IEM_MC_BEGIN(2, 2);
3849 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3850 IEM_MC_LOCAL(RTUINT128U, uSrc);
3851 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3853
3854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3857 /** @todo Most CPUs probably only read the high qword. We read everything to
3858 * make sure we apply segmentation and alignment checks correctly.
3859 * When we have time, it would be interesting to explore what real
3860 * CPUs actually do and whether they do a TLB load for the lower
3861 * part or skip any associated \#PF. */
3862 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3863
3864 IEM_MC_PREPARE_SSE_USAGE();
3865 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3866 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3867
3868 IEM_MC_ADVANCE_RIP();
3869 IEM_MC_END();
3870 }
3871 return VINF_SUCCESS;
3872}
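
/*
 * The 128-bit flavour is easiest to see with punpckhqdq, which this worker
 * also serves: the high qwords of destination and source become the new low
 * and high qwords. A sketch assuming the Lo/Hi field view of IPRT's
 * RTUINT128U; not the actual iemAImpl_punpckhqdq_u128 worker.
 */
static void sketchPunpckhqdqU128(RTUINT128U *puDst, PCRTUINT128U puSrc)
{
    puDst->s.Lo = puDst->s.Hi;  /* old high qword of the destination */
    puDst->s.Hi = puSrc->s.Hi;  /* high qword of the source */
}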
3873
3874
3875/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
3876 * @note Intel and AMD both use Qd for the second parameter, however they
3877 * both list it as a mmX/mem64 operand and Intel describes it as being
3878 * loaded as a qword, so it should be Qq, shouldn't it? */
3879FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
3880{
3881 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3882 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
3883}
3884
3885
3886/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3887FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3888{
3889 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3890 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
3891}
3892
3893
3894/* Opcode 0xf3 0x0f 0x68 - invalid */
3895
3896
3897/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
3898 * @note Intel and AMD both use Qd for the second parameter, however they
3899 * both list it as a mmX/mem64 operand and Intel describes it as being
3900 * loaded as a qword, so it should be Qq, shouldn't it? */
3901FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
3902{
3903 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3904 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
3905}
3906
3907
3908/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3909FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3910{
3911 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3912 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
3914}
3915
3916
3917/* Opcode 0xf3 0x0f 0x69 - invalid */
3918
3919
3920/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
3921 * @note Intel and AMD both use Qd for the second parameter, however they
3922 * both list it as a mmX/mem64 operand and Intel describes it as being
3923 * loaded as a qword, so it should be Qq, shouldn't it? */
3924FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
3925{
3926 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3927 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
3928}
3929
3930
3931/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3932FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
3933{
3934 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3935 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
3936}
3937
3938
3939/* Opcode 0xf3 0x0f 0x6a - invalid */
3940
3941
3942/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3943FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
3944{
3945 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3946 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
3947}
3948
3949
3950/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3951FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
3952{
3953 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3954 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
3955}
3956
3957
3958/* Opcode 0xf3 0x0f 0x6b - invalid */
3959
3960
3961/* Opcode 0x0f 0x6c - invalid */
3962
3963
3964/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3965FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3966{
3967 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3968 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
3969}
3970
3971
3972/* Opcode 0xf3 0x0f 0x6c - invalid */
3973/* Opcode 0xf2 0x0f 0x6c - invalid */
3974
3975
3976/* Opcode 0x0f 0x6d - invalid */
3977
3978
3979/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3980FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
3981{
3982 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3983 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
3984}
3985
3986
3987/* Opcode 0xf3 0x0f 0x6d - invalid */
3988
3989
3990FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3991{
3992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3993 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3994 {
3995 /**
3996 * @opcode 0x6e
3997 * @opcodesub rex.w=1
3998 * @oppfx none
3999 * @opcpuid mmx
4000 * @opgroup og_mmx_datamove
4001 * @opxcpttype 5
4002 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4003 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4004 */
4005 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4006 if (IEM_IS_MODRM_REG_MODE(bRm))
4007 {
4008 /* MMX, greg64 */
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4010 IEM_MC_BEGIN(0, 1);
4011 IEM_MC_LOCAL(uint64_t, u64Tmp);
4012
4013 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4014 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4015
4016 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4017 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4018 IEM_MC_FPU_TO_MMX_MODE();
4019
4020 IEM_MC_ADVANCE_RIP();
4021 IEM_MC_END();
4022 }
4023 else
4024 {
4025 /* MMX, [mem64] */
4026 IEM_MC_BEGIN(0, 2);
4027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4028 IEM_MC_LOCAL(uint64_t, u64Tmp);
4029
4030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4034
4035 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4036 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4037 IEM_MC_FPU_TO_MMX_MODE();
4038
4039 IEM_MC_ADVANCE_RIP();
4040 IEM_MC_END();
4041 }
4042 }
4043 else
4044 {
4045 /**
4046 * @opdone
4047 * @opcode 0x6e
4048 * @opcodesub rex.w=0
4049 * @oppfx none
4050 * @opcpuid mmx
4051 * @opgroup og_mmx_datamove
4052 * @opxcpttype 5
4053 * @opfunction iemOp_movd_q_Pd_Ey
4054 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4055 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4056 */
4057 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4058 if (IEM_IS_MODRM_REG_MODE(bRm))
4059 {
4060 /* MMX, greg */
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062 IEM_MC_BEGIN(0, 1);
4063 IEM_MC_LOCAL(uint64_t, u64Tmp);
4064
4065 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4067
4068 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4069 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4070 IEM_MC_FPU_TO_MMX_MODE();
4071
4072 IEM_MC_ADVANCE_RIP();
4073 IEM_MC_END();
4074 }
4075 else
4076 {
4077 /* MMX, [mem32] */
4078 IEM_MC_BEGIN(0, 2);
4079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4080 IEM_MC_LOCAL(uint32_t, u32Tmp);
4081
4082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4084 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4085 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4086
4087 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4088 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4089 IEM_MC_FPU_TO_MMX_MODE();
4090
4091 IEM_MC_ADVANCE_RIP();
4092 IEM_MC_END();
4093 }
4094 }
4095 return VINF_SUCCESS;
4096}
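
/*
 * The REX.W test above is the whole difference between the two forms: with
 * REX.W=1 the instruction assembles as "movq mm0, rax" and copies all 64
 * bits, with REX.W=0 it is "movd mm0, eax" and the 32-bit value is
 * zero-extended. A minimal model, assuming a plain uint64_t view of the
 * MMX register:
 */
static uint64_t sketchMovdMovqToMmx(uint64_t uGReg, bool fRexW)
{
    return fRexW ? uGReg : uGReg & UINT32_MAX; /* REX.W=0: zero-extend the low dword */
}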
4097
4098FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4099{
4100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4101 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4102 {
4103 /**
4104 * @opcode 0x6e
4105 * @opcodesub rex.w=1
4106 * @oppfx 0x66
4107 * @opcpuid sse2
4108 * @opgroup og_sse2_simdint_datamove
4109 * @opxcpttype 5
4110 * @optest 64-bit / op1=1 op2=2 -> op1=2
4111 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4112 */
4113 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4114 if (IEM_IS_MODRM_REG_MODE(bRm))
4115 {
4116 /* XMM, greg64 */
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118 IEM_MC_BEGIN(0, 1);
4119 IEM_MC_LOCAL(uint64_t, u64Tmp);
4120
4121 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4122 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4123
4124 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4125 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4126
4127 IEM_MC_ADVANCE_RIP();
4128 IEM_MC_END();
4129 }
4130 else
4131 {
4132 /* XMM, [mem64] */
4133 IEM_MC_BEGIN(0, 2);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135 IEM_MC_LOCAL(uint64_t, u64Tmp);
4136
4137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4139 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4141
4142 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4143 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 }
4149 else
4150 {
4151 /**
4152 * @opdone
4153 * @opcode 0x6e
4154 * @opcodesub rex.w=0
4155 * @oppfx 0x66
4156 * @opcpuid sse2
4157 * @opgroup og_sse2_simdint_datamove
4158 * @opxcpttype 5
4159 * @opfunction iemOp_movd_q_Vy_Ey
4160 * @optest op1=1 op2=2 -> op1=2
4161 * @optest op1=0 op2=-42 -> op1=-42
4162 */
4163 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4164 if (IEM_IS_MODRM_REG_MODE(bRm))
4165 {
4166 /* XMM, greg32 */
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168 IEM_MC_BEGIN(0, 1);
4169 IEM_MC_LOCAL(uint32_t, u32Tmp);
4170
4171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4173
4174 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4175 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4176
4177 IEM_MC_ADVANCE_RIP();
4178 IEM_MC_END();
4179 }
4180 else
4181 {
4182 /* XMM, [mem32] */
4183 IEM_MC_BEGIN(0, 2);
4184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4185 IEM_MC_LOCAL(uint32_t, u32Tmp);
4186
4187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4191
4192 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4194
4195 IEM_MC_ADVANCE_RIP();
4196 IEM_MC_END();
4197 }
4198 }
4199 return VINF_SUCCESS;
4200}
4201
4202/* Opcode 0xf3 0x0f 0x6e - invalid */
4203
4204
4205/**
4206 * @opcode 0x6f
4207 * @oppfx none
4208 * @opcpuid mmx
4209 * @opgroup og_mmx_datamove
4210 * @opxcpttype 5
4211 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4212 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4213 */
4214FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4215{
4216 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218 if (IEM_IS_MODRM_REG_MODE(bRm))
4219 {
4220 /*
4221 * Register, register.
4222 */
4223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4224 IEM_MC_BEGIN(0, 1);
4225 IEM_MC_LOCAL(uint64_t, u64Tmp);
4226
4227 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4228 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4229
4230 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4231 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4232 IEM_MC_FPU_TO_MMX_MODE();
4233
4234 IEM_MC_ADVANCE_RIP();
4235 IEM_MC_END();
4236 }
4237 else
4238 {
4239 /*
4240 * Register, memory.
4241 */
4242 IEM_MC_BEGIN(0, 2);
4243 IEM_MC_LOCAL(uint64_t, u64Tmp);
4244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4245
4246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4248 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4249 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4250
4251 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4252 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4253 IEM_MC_FPU_TO_MMX_MODE();
4254
4255 IEM_MC_ADVANCE_RIP();
4256 IEM_MC_END();
4257 }
4258 return VINF_SUCCESS;
4259}
4260
4261/**
4262 * @opcode 0x6f
4263 * @oppfx 0x66
4264 * @opcpuid sse2
4265 * @opgroup og_sse2_simdint_datamove
4266 * @opxcpttype 1
4267 * @optest op1=1 op2=2 -> op1=2
4268 * @optest op1=0 op2=-42 -> op1=-42
4269 */
4270FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4271{
4272 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4274 if (IEM_IS_MODRM_REG_MODE(bRm))
4275 {
4276 /*
4277 * Register, register.
4278 */
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4280 IEM_MC_BEGIN(0, 0);
4281
4282 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4284
4285 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4286 IEM_GET_MODRM_RM(pVCpu, bRm));
4287 IEM_MC_ADVANCE_RIP();
4288 IEM_MC_END();
4289 }
4290 else
4291 {
4292 /*
4293 * Register, memory.
4294 */
4295 IEM_MC_BEGIN(0, 2);
4296 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4298
4299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4302 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4303
4304 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4305 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4306
4307 IEM_MC_ADVANCE_RIP();
4308 IEM_MC_END();
4309 }
4310 return VINF_SUCCESS;
4311}
4312
4313/**
4314 * @opcode 0x6f
4315 * @oppfx 0xf3
4316 * @opcpuid sse2
4317 * @opgroup og_sse2_simdint_datamove
4318 * @opxcpttype 4UA
4319 * @optest op1=1 op2=2 -> op1=2
4320 * @optest op1=0 op2=-42 -> op1=-42
4321 */
4322FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4323{
4324 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4326 if (IEM_IS_MODRM_REG_MODE(bRm))
4327 {
4328 /*
4329 * Register, register.
4330 */
4331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4332 IEM_MC_BEGIN(0, 0);
4333 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4335 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4336 IEM_GET_MODRM_RM(pVCpu, bRm));
4337 IEM_MC_ADVANCE_RIP();
4338 IEM_MC_END();
4339 }
4340 else
4341 {
4342 /*
4343 * Register, memory.
4344 */
4345 IEM_MC_BEGIN(0, 2);
4346 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4348
4349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4351 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4352 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4353 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4354 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4355
4356 IEM_MC_ADVANCE_RIP();
4357 IEM_MC_END();
4358 }
4359 return VINF_SUCCESS;
4360}
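
/*
 * The only difference between the movdqa and movdqu paths above is the
 * memory fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte
 * alignment movdqa demands (a miss raises #GP(0) under SSE rules), while
 * movdqu takes the plain fetch. A sketch of that alignment predicate:
 */
static bool sketchIsSse16ByteAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* low four bits clear <=> 16-byte aligned */
}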
4361
4362
4363/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
4364FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
4365{
4366 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4368 if (IEM_IS_MODRM_REG_MODE(bRm))
4369 {
4370 /*
4371 * Register, register.
4372 */
4373 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4375
4376 IEM_MC_BEGIN(3, 0);
4377 IEM_MC_ARG(uint64_t *, pDst, 0);
4378 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4379 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4380 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4381 IEM_MC_PREPARE_FPU_USAGE();
4382 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4383 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
4384 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4385 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4386 IEM_MC_FPU_TO_MMX_MODE();
4387 IEM_MC_ADVANCE_RIP();
4388 IEM_MC_END();
4389 }
4390 else
4391 {
4392 /*
4393 * Register, memory.
4394 */
4395 IEM_MC_BEGIN(3, 2);
4396 IEM_MC_ARG(uint64_t *, pDst, 0);
4397 IEM_MC_LOCAL(uint64_t, uSrc);
4398 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4400
4401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4402 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4403 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4406
4407 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4408 IEM_MC_PREPARE_FPU_USAGE();
4409 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4410 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4411 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4412 IEM_MC_FPU_TO_MMX_MODE();
4413
4414 IEM_MC_ADVANCE_RIP();
4415 IEM_MC_END();
4416 }
4417 return VINF_SUCCESS;
4418}
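
/*
 * How the pshufw immediate works: each 2-bit field of the imm8 selects the
 * source word for the corresponding destination word, so for instance
 * "pshufw mm0, mm1, 0x1b" reverses the four words. A minimal plain-C
 * sketch of the job iemAImpl_pshufw_u64 performs (illustrative only):
 */
static uint64_t sketchPshufwU64(uint64_t uSrc, uint8_t bOrder)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bOrder >> (i * 2)) & 3;           /* 2-bit word selector */
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (i * 16); /* place the selected word */
    }
    return uResult;
}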
4419
4420
4421/**
4422 * Common worker for SSE2 instructions on the forms:
4423 * pshufd xmm1, xmm2/mem128, imm8
4424 * pshufhw xmm1, xmm2/mem128, imm8
4425 * pshuflw xmm1, xmm2/mem128, imm8
4426 *
4427 * Proper alignment of the 128-bit operand is enforced.
4428 * Exceptions type 4. SSE2 cpuid checks.
4429 */
4430FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4431{
4432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4433 if (IEM_IS_MODRM_REG_MODE(bRm))
4434 {
4435 /*
4436 * Register, register.
4437 */
4438 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440
4441 IEM_MC_BEGIN(3, 0);
4442 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4443 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4444 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4445 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4446 IEM_MC_PREPARE_SSE_USAGE();
4447 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4448 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4449 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4450 IEM_MC_ADVANCE_RIP();
4451 IEM_MC_END();
4452 }
4453 else
4454 {
4455 /*
4456 * Register, memory.
4457 */
4458 IEM_MC_BEGIN(3, 2);
4459 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4460 IEM_MC_LOCAL(RTUINT128U, uSrc);
4461 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4463
4464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4465 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4466 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4468 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4469
4470 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4471 IEM_MC_PREPARE_SSE_USAGE();
4472 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4473 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4474
4475 IEM_MC_ADVANCE_RIP();
4476 IEM_MC_END();
4477 }
4478 return VINF_SUCCESS;
4479}
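
/*
 * The three users of this worker differ only in which lanes the imm8
 * selectors apply to: pshufd shuffles the four dwords, pshuflw the four
 * low words (high qword copied through), pshufhw the four high words (low
 * qword copied through). A hedged sketch of the pshufhw case over
 * RTUINT128U's Lo/Hi view:
 */
static void sketchPshufhwU128(RTUINT128U *puDst, PCRTUINT128U puSrc, uint8_t bOrder)
{
    uint64_t const uSrcHi = puSrc->s.Hi;    /* read first in case puDst == puSrc */
    uint64_t uHi = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bOrder >> (i * 2)) & 3;
        uHi |= ((uSrcHi >> (iSel * 16)) & 0xffff) << (i * 16);
    }
    puDst->s.Lo = puSrc->s.Lo;              /* low qword passes through unchanged */
    puDst->s.Hi = uHi;
}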
4480
4481
4482/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4483FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4484{
4485 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4486 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
4487}
4488
4489
4490/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4491FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4492{
4493 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4494 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
4495}
4496
4497
4498/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4499FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4500{
4501 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4502 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
4503}
4504
4505
4506/**
4507 * Common worker for MMX instructions of the form:
4508 * psrlw mm, imm8
4509 * psraw mm, imm8
4510 * psllw mm, imm8
4511 * psrld mm, imm8
4512 * psrad mm, imm8
4513 * pslld mm, imm8
4514 * psrlq mm, imm8
4515 * psllq mm, imm8
4516 *
4517 */
4518FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
4519{
4520 if (IEM_IS_MODRM_REG_MODE(bRm))
4521 {
4522 /*
4523 * Register, immediate.
4524 */
4525 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4527
4528 IEM_MC_BEGIN(2, 0);
4529 IEM_MC_ARG(uint64_t *, pDst, 0);
4530 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4531 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4532 IEM_MC_PREPARE_FPU_USAGE();
4533 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4534 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
4535 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4536 IEM_MC_FPU_TO_MMX_MODE();
4537 IEM_MC_ADVANCE_RIP();
4538 IEM_MC_END();
4539 }
4540 else
4541 {
4542 /*
4543 * Register, memory not supported.
4544 */
4545 /// @todo Caller already enforced register mode?!
4546 }
4547 return VINF_SUCCESS;
4548}
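
/*
 * The per-element semantics behind these shift workers have one wrinkle:
 * an immediate at or above the element width does not wrap. Logical
 * shifts zero the element, while psraw/psrad keep replicating the sign
 * bit. A minimal psrlw-style sketch over a 64-bit MMX value:
 */
static uint64_t sketchPsrlwImmU64(uint64_t uDst, uint8_t bShift)
{
    if (bShift > 15)
        return 0; /* a count of 16 or more clears all four words */
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
        uResult |= (((uDst >> (i * 16)) & 0xffff) >> bShift) << (i * 16);
    return uResult;
}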
4549
4550
4551/**
4552 * Common worker for SSE2 instructions of the form:
4553 * psrlw xmm, imm8
4554 * psraw xmm, imm8
4555 * psllw xmm, imm8
4556 * psrld xmm, imm8
4557 * psrad xmm, imm8
4558 * pslld xmm, imm8
4559 * psrlq xmm, imm8
4560 * psllq xmm, imm8
4561 *
4562 */
4563FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
4564{
4565 if (IEM_IS_MODRM_REG_MODE(bRm))
4566 {
4567 /*
4568 * Register, immediate.
4569 */
4570 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572
4573 IEM_MC_BEGIN(2, 0);
4574 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4575 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4576 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4577 IEM_MC_PREPARE_SSE_USAGE();
4578 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4579 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
4580 IEM_MC_ADVANCE_RIP();
4581 IEM_MC_END();
4582 }
4583 else
4584 {
4585 /*
4586 * Register, memory not supported.
4587 */
4588 /// @todo Caller already enforced register mode?!
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
4595FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
4596{
4597// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4598 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
4599}
4600
4601
4602/** Opcode 0x66 0x0f 0x71 11/2. */
4603FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
4604{
4605// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4606 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
4607}
4608
4609
4610/** Opcode 0x0f 0x71 11/4. */
4611FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
4612{
4613// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4614 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
4615}
4616
4617
4618/** Opcode 0x66 0x0f 0x71 11/4. */
4619FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
4620{
4621// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4622 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
4623}
4624
4625
4626/** Opcode 0x0f 0x71 11/6. */
4627FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
4628{
4629// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4630 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
4631}
4632
4633
4634/** Opcode 0x66 0x0f 0x71 11/6. */
4635FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
4636{
4637// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4638 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
4639}
4640
4641
4642/**
4643 * Group 12 jump table for register variant.
4644 */
4645IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4646{
4647 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4648 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4649 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4650 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4651 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4652 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4653 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4654 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4655};
4656AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4657
4658
4659/** Opcode 0x0f 0x71. */
4660FNIEMOP_DEF(iemOp_Grp12)
4661{
4662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4663 if (IEM_IS_MODRM_REG_MODE(bRm))
4664 /* register, register */
4665 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4666 + pVCpu->iem.s.idxPrefix], bRm);
4667 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4668}
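
/*
 * The reg-times-four indexing in iemOp_Grp12 above (and its Grp13/Grp14
 * twins below) flattens the table into four prefix columns per ModR/M reg
 * value. A sketch, assuming idxPrefix encodes none/0x66/0xf3/0xf2 as 0..3
 * to match the column order in the table comments:
 */
static PFNIEMOPRM sketchGrp12Lookup(uint8_t bRm, uint8_t idxPrefix)
{
    return g_apfnGroup12RegReg[IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix];
}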
4669
4670
4671/** Opcode 0x0f 0x72 11/2. */
4672FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
4673{
4674// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4675 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
4676}
4677
4678
4679/** Opcode 0x66 0x0f 0x72 11/2. */
4680FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
4681{
4682// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4683 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
4684}
4685
4686
4687/** Opcode 0x0f 0x72 11/4. */
4688FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
4689{
4690// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4691 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
4692}
4693
4694
4695/** Opcode 0x66 0x0f 0x72 11/4. */
4696FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
4697{
4698// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4699 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
4700}
4701
4702
4703/** Opcode 0x0f 0x72 11/6. */
4704FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
4705{
4706// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4707 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
4708}
4709
4710/** Opcode 0x66 0x0f 0x72 11/6. */
4711FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
4712{
4713// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4714 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
4715}
4716
4717
4718/**
4719 * Group 13 jump table for register variant.
4720 */
4721IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4722{
4723 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4724 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4725 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4726 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4727 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4728 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4729 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4730 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4731};
4732AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4733
4734/** Opcode 0x0f 0x72. */
4735FNIEMOP_DEF(iemOp_Grp13)
4736{
4737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4738 if (IEM_IS_MODRM_REG_MODE(bRm))
4739 /* register, register */
4740 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4741 + pVCpu->iem.s.idxPrefix], bRm);
4742 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4743}
4744
4745
4746/** Opcode 0x0f 0x73 11/2. */
4747FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
4748{
4749// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4750 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
4751}
4752
4753
4754/** Opcode 0x66 0x0f 0x73 11/2. */
4755FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
4756{
4757// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4758 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
4759}
4760
4761
4762/** Opcode 0x66 0x0f 0x73 11/3. */
4763FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
4764{
4765// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4766 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
4767}
4768
4769
4770/** Opcode 0x0f 0x73 11/6. */
4771FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
4772{
4773// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4774 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
4775}
4776
4777
4778/** Opcode 0x66 0x0f 0x73 11/6. */
4779FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
4780{
4781// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4782 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
4783}
4784
4785
4786/** Opcode 0x66 0x0f 0x73 11/7. */
4787FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
4788{
4789// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4790 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
4791}
4792
4793/**
4794 * Group 14 jump table for register variant.
4795 */
4796IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4797{
4798 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4799 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4800 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4801 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4802 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4803 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4804 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4805 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4806};
4807AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4808
4809
4810/** Opcode 0x0f 0x73. */
4811FNIEMOP_DEF(iemOp_Grp14)
4812{
4813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4814 if (IEM_IS_MODRM_REG_MODE(bRm))
4815 /* register, register */
4816 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4817 + pVCpu->iem.s.idxPrefix], bRm);
4818 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4819}
4820
4821
4822/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4823FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4824{
4825 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4826 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
4827}
4828
4829
4830/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4831FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4832{
4833 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4834 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
4835}
4836
4837
4838/* Opcode 0xf3 0x0f 0x74 - invalid */
4839/* Opcode 0xf2 0x0f 0x74 - invalid */
4840
4841
4842/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4843FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4844{
4845 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4846 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
4847}
4848
4849
4850/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4851FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4852{
4853 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4854 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
4855}
4856
4857
4858/* Opcode 0xf3 0x0f 0x75 - invalid */
4859/* Opcode 0xf2 0x0f 0x75 - invalid */
4860
4861
4862/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4863FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4864{
4865 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4866 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
4867}
4868
4869
4870/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4871FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4872{
4873 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4874 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
4875}
4876
4877
4878/* Opcode 0xf3 0x0f 0x76 - invalid */
4879/* Opcode 0xf2 0x0f 0x76 - invalid */
4880
4881
4882/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4883FNIEMOP_DEF(iemOp_emms)
4884{
4885 IEMOP_MNEMONIC(emms, "emms");
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4890 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4891 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4892 IEM_MC_FPU_FROM_MMX_MODE();
4893 IEM_MC_ADVANCE_RIP();
4894 IEM_MC_END();
4895 return VINF_SUCCESS;
4896}
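
/*
 * Architecturally, emms tags every x87 register empty again so ordinary
 * FPU code can follow MMX code. A minimal sketch of that effect on the
 * full (non-abridged) tag word; how IEM models the transition internally
 * is the business of IEM_MC_FPU_FROM_MMX_MODE above:
 */
static void sketchEmmsTagEffect(uint16_t *pu16Ftw)
{
    *pu16Ftw = 0xffff; /* 11b ("empty") for all eight registers */
}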
4897
4898/* Opcode 0x66 0x0f 0x77 - invalid */
4899/* Opcode 0xf3 0x0f 0x77 - invalid */
4900/* Opcode 0xf2 0x0f 0x77 - invalid */
4901
4902/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4903#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4904FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4905{
4906 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4907 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4908 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
4909 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4910
4911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4912 if (IEM_IS_MODRM_REG_MODE(bRm))
4913 {
4914 /*
4915 * Register, register.
4916 */
4917 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4918 if (enmEffOpSize == IEMMODE_64BIT)
4919 {
4920 IEM_MC_BEGIN(2, 0);
4921 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4922 IEM_MC_ARG(uint64_t, u64Enc, 1);
4923 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4924 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
4925 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
4926 IEM_MC_END();
4927 }
4928 else
4929 {
4930 IEM_MC_BEGIN(2, 0);
4931 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4932 IEM_MC_ARG(uint32_t, u32Enc, 1);
4933 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4934 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
4935 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
4936 IEM_MC_END();
4937 }
4938 }
4939 else
4940 {
4941 /*
4942 * Memory, register.
4943 */
4944 if (enmEffOpSize == IEMMODE_64BIT)
4945 {
4946 IEM_MC_BEGIN(3, 0);
4947 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4948 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4949 IEM_MC_ARG(uint64_t, u64Enc, 2);
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4951 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4952 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4953 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4954 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
4955 IEM_MC_END();
4956 }
4957 else
4958 {
4959 IEM_MC_BEGIN(3, 0);
4960 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4961 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4962 IEM_MC_ARG(uint32_t, u32Enc, 2);
4963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4964 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4965 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
4966 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4967 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
4968 IEM_MC_END();
4969 }
4970 }
4971 return VINF_SUCCESS;
4972}
4973#else
4974FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4975#endif
4976
4977/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4978FNIEMOP_STUB(iemOp_AmdGrp17);
4979/* Opcode 0xf3 0x0f 0x78 - invalid */
4980/* Opcode 0xf2 0x0f 0x78 - invalid */
4981
4982/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4983#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4984FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4985{
4986 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4987 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4988 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4989 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4990
4991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4992 if (IEM_IS_MODRM_REG_MODE(bRm))
4993 {
4994 /*
4995 * Register, register.
4996 */
4997 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4998 if (enmEffOpSize == IEMMODE_64BIT)
4999 {
5000 IEM_MC_BEGIN(2, 0);
5001 IEM_MC_ARG(uint64_t, u64Val, 0);
5002 IEM_MC_ARG(uint64_t, u64Enc, 1);
5003 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5004 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5005 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
5006 IEM_MC_END();
5007 }
5008 else
5009 {
5010 IEM_MC_BEGIN(2, 0);
5011 IEM_MC_ARG(uint32_t, u32Val, 0);
5012 IEM_MC_ARG(uint32_t, u32Enc, 1);
5013 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5014 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5015 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
5016 IEM_MC_END();
5017 }
5018 }
5019 else
5020 {
5021 /*
5022 * Register, memory.
5023 */
5024 if (enmEffOpSize == IEMMODE_64BIT)
5025 {
5026 IEM_MC_BEGIN(3, 0);
5027 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5028 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5029 IEM_MC_ARG(uint64_t, u64Enc, 2);
5030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5031 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5032 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5033 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5034 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
5035 IEM_MC_END();
5036 }
5037 else
5038 {
5039 IEM_MC_BEGIN(3, 0);
5040 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5041 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5042 IEM_MC_ARG(uint32_t, u32Enc, 2);
5043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5044 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5045 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5046 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5047 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
5048 IEM_MC_END();
5049 }
5050 }
5051 return VINF_SUCCESS;
5052}
5053#else
5054FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5055#endif
5056/* Opcode 0x66 0x0f 0x79 - invalid */
5057/* Opcode 0xf3 0x0f 0x79 - invalid */
5058/* Opcode 0xf2 0x0f 0x79 - invalid */
5059
5060/* Opcode 0x0f 0x7a - invalid */
5061/* Opcode 0x66 0x0f 0x7a - invalid */
5062/* Opcode 0xf3 0x0f 0x7a - invalid */
5063/* Opcode 0xf2 0x0f 0x7a - invalid */
5064
5065/* Opcode 0x0f 0x7b - invalid */
5066/* Opcode 0x66 0x0f 0x7b - invalid */
5067/* Opcode 0xf3 0x0f 0x7b - invalid */
5068/* Opcode 0xf2 0x0f 0x7b - invalid */
5069
5070/* Opcode 0x0f 0x7c - invalid */
5071/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5072FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5073/* Opcode 0xf3 0x0f 0x7c - invalid */
5074/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5075FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5076
5077/* Opcode 0x0f 0x7d - invalid */
5078/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5079FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5080/* Opcode 0xf3 0x0f 0x7d - invalid */
5081/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5082FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5083
5084
5085/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5086FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5087{
5088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5089 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5090 {
5091 /**
5092 * @opcode 0x7e
5093 * @opcodesub rex.w=1
5094 * @oppfx none
5095 * @opcpuid mmx
5096 * @opgroup og_mmx_datamove
5097 * @opxcpttype 5
5098 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5099 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5100 */
5101 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5102 if (IEM_IS_MODRM_REG_MODE(bRm))
5103 {
5104 /* greg64, MMX */
5105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5106 IEM_MC_BEGIN(0, 1);
5107 IEM_MC_LOCAL(uint64_t, u64Tmp);
5108
5109 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5110 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5111
5112 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5113 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5114 IEM_MC_FPU_TO_MMX_MODE();
5115
5116 IEM_MC_ADVANCE_RIP();
5117 IEM_MC_END();
5118 }
5119 else
5120 {
5121 /* [mem64], MMX */
5122 IEM_MC_BEGIN(0, 2);
5123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5124 IEM_MC_LOCAL(uint64_t, u64Tmp);
5125
5126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5129 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5130
5131 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5132 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5133 IEM_MC_FPU_TO_MMX_MODE();
5134
5135 IEM_MC_ADVANCE_RIP();
5136 IEM_MC_END();
5137 }
5138 }
5139 else
5140 {
5141 /**
5142 * @opdone
5143 * @opcode 0x7e
5144 * @opcodesub rex.w=0
5145 * @oppfx none
5146 * @opcpuid mmx
5147 * @opgroup og_mmx_datamove
5148 * @opxcpttype 5
5149 * @opfunction iemOp_movd_q_Ey_Pd
5150 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5151 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5152 */
5153 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5154 if (IEM_IS_MODRM_REG_MODE(bRm))
5155 {
5156 /* greg32, MMX */
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158 IEM_MC_BEGIN(0, 1);
5159 IEM_MC_LOCAL(uint32_t, u32Tmp);
5160
5161 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5162 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5163
5164 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5165 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5166 IEM_MC_FPU_TO_MMX_MODE();
5167
5168 IEM_MC_ADVANCE_RIP();
5169 IEM_MC_END();
5170 }
5171 else
5172 {
5173 /* [mem32], MMX */
5174 IEM_MC_BEGIN(0, 2);
5175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5176 IEM_MC_LOCAL(uint32_t, u32Tmp);
5177
5178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5180 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5181 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5182
5183 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5184 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5185 IEM_MC_FPU_TO_MMX_MODE();
5186
5187 IEM_MC_ADVANCE_RIP();
5188 IEM_MC_END();
5189 }
5190 }
5191 return VINF_SUCCESS;
5193}
5194
5195
5196FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5197{
5198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5199 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5200 {
5201 /**
5202 * @opcode 0x7e
5203 * @opcodesub rex.w=1
5204 * @oppfx 0x66
5205 * @opcpuid sse2
5206 * @opgroup og_sse2_simdint_datamove
5207 * @opxcpttype 5
5208 * @optest 64-bit / op1=1 op2=2 -> op1=2
5209 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5210 */
5211 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5212 if (IEM_IS_MODRM_REG_MODE(bRm))
5213 {
5214 /* greg64, XMM */
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 IEM_MC_BEGIN(0, 1);
5217 IEM_MC_LOCAL(uint64_t, u64Tmp);
5218
5219 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5220 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5221
5222 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5223 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5224
5225 IEM_MC_ADVANCE_RIP();
5226 IEM_MC_END();
5227 }
5228 else
5229 {
5230 /* [mem64], XMM */
5231 IEM_MC_BEGIN(0, 2);
5232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5233 IEM_MC_LOCAL(uint64_t, u64Tmp);
5234
5235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5239
5240 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5241 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5242
5243 IEM_MC_ADVANCE_RIP();
5244 IEM_MC_END();
5245 }
5246 }
5247 else
5248 {
5249 /**
5250 * @opdone
5251 * @opcode 0x7e
5252 * @opcodesub rex.w=0
5253 * @oppfx 0x66
5254 * @opcpuid sse2
5255 * @opgroup og_sse2_simdint_datamove
5256 * @opxcpttype 5
5257 * @opfunction iemOp_movd_q_Ey_Vy
5258 * @optest op1=1 op2=2 -> op1=2
5259 * @optest op1=0 op2=-42 -> op1=-42
5260 */
5261 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5262 if (IEM_IS_MODRM_REG_MODE(bRm))
5263 {
5264 /* greg32, XMM */
5265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5266 IEM_MC_BEGIN(0, 1);
5267 IEM_MC_LOCAL(uint32_t, u32Tmp);
5268
5269 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5271
5272 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5273 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5274
5275 IEM_MC_ADVANCE_RIP();
5276 IEM_MC_END();
5277 }
5278 else
5279 {
5280 /* [mem32], XMM */
5281 IEM_MC_BEGIN(0, 2);
5282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5283 IEM_MC_LOCAL(uint32_t, u32Tmp);
5284
5285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5288 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5289
5290 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5291 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5292
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 }
5297 return VINF_SUCCESS;
5299}
5300
5301/**
5302 * @opcode 0x7e
5303 * @oppfx 0xf3
5304 * @opcpuid sse2
5305 * @opgroup og_sse2_pcksclr_datamove
5306 * @opxcpttype none
5307 * @optest op1=1 op2=2 -> op1=2
5308 * @optest op1=0 op2=-42 -> op1=-42
5309 */
5310FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5311{
5312 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5314 if (IEM_IS_MODRM_REG_MODE(bRm))
5315 {
5316 /*
5317 * Register, register.
5318 */
5319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5320 IEM_MC_BEGIN(0, 2);
5321 IEM_MC_LOCAL(uint64_t, uSrc);
5322
5323 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5324 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5325
5326 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5327 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5328
5329 IEM_MC_ADVANCE_RIP();
5330 IEM_MC_END();
5331 }
5332 else
5333 {
5334 /*
5335 * Register, memory.
5336 */
5337 IEM_MC_BEGIN(0, 2);
5338 IEM_MC_LOCAL(uint64_t, uSrc);
5339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5340
5341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5343 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5344 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5345
5346 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5347 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5348
5349 IEM_MC_ADVANCE_RIP();
5350 IEM_MC_END();
5351 }
5352 return VINF_SUCCESS;
5353}
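
/*
 * Worth noting: unlike the MMX movq, this SSE2 form always zero-extends
 * into the full 128-bit register, in the reg,reg case as well. A sketch
 * over RTUINT128U's Lo/Hi view:
 */
static void sketchMovqVqWq(RTUINT128U *puDst, uint64_t uSrc)
{
    puDst->s.Lo = uSrc; /* low qword = 64-bit source */
    puDst->s.Hi = 0;    /* high qword is zeroed */
}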
5354
5355/* Opcode 0xf2 0x0f 0x7e - invalid */
5356
5357
5358/** Opcode 0x0f 0x7f - movq Qq, Pq */
5359FNIEMOP_DEF(iemOp_movq_Qq_Pq)
5360{
5361 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363 if (IEM_IS_MODRM_REG_MODE(bRm))
5364 {
5365 /*
5366 * Register, register.
5367 */
5368 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
5369 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 IEM_MC_BEGIN(0, 1);
5372 IEM_MC_LOCAL(uint64_t, u64Tmp);
5373 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5374 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5375 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5376 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
5377 IEM_MC_FPU_TO_MMX_MODE();
5378 IEM_MC_ADVANCE_RIP();
5379 IEM_MC_END();
5380 }
5381 else
5382 {
5383 /*
5384 * Memory, register.
5385 */
5386 IEM_MC_BEGIN(0, 2);
5387 IEM_MC_LOCAL(uint64_t, u64Tmp);
5388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5389
5390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5392 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5393 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5394
5395 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5396 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5397 IEM_MC_FPU_TO_MMX_MODE();
5398
5399 IEM_MC_ADVANCE_RIP();
5400 IEM_MC_END();
5401 }
5402 return VINF_SUCCESS;
5403}
5404
5405/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
5406FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
5407{
5408 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5410 if (IEM_IS_MODRM_REG_MODE(bRm))
5411 {
5412 /*
5413 * Register, register.
5414 */
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_BEGIN(0, 0);
5417 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5418 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5419 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5420 IEM_GET_MODRM_REG(pVCpu, bRm));
5421 IEM_MC_ADVANCE_RIP();
5422 IEM_MC_END();
5423 }
5424 else
5425 {
5426 /*
5427 * Register, memory.
5428 */
5429 IEM_MC_BEGIN(0, 2);
5430 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5432
5433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5435 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5437
5438 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5439 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5440
5441 IEM_MC_ADVANCE_RIP();
5442 IEM_MC_END();
5443 }
5444 return VINF_SUCCESS;
5445}
5446
5447/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
5448FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
5449{
5450 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5452 if (IEM_IS_MODRM_REG_MODE(bRm))
5453 {
5454 /*
5455 * Register, register.
5456 */
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 IEM_MC_BEGIN(0, 0);
5459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5460 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5461 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5462 IEM_GET_MODRM_REG(pVCpu, bRm));
5463 IEM_MC_ADVANCE_RIP();
5464 IEM_MC_END();
5465 }
5466 else
5467 {
5468 /*
5469 * Register, memory.
5470 */
5471 IEM_MC_BEGIN(0, 2);
5472 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5474
5475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5478 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5479
5480 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5481 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5482
5483 IEM_MC_ADVANCE_RIP();
5484 IEM_MC_END();
5485 }
5486 return VINF_SUCCESS;
5487}
5488
5489/* Opcode 0xf2 0x0f 0x7f - invalid */
5490
5491
5492
5493/** Opcode 0x0f 0x80. */
5494FNIEMOP_DEF(iemOp_jo_Jv)
5495{
5496 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
5497 IEMOP_HLP_MIN_386();
5498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5499 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5500 {
5501 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503
5504 IEM_MC_BEGIN(0, 0);
5505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5506 IEM_MC_REL_JMP_S16(i16Imm);
5507 } IEM_MC_ELSE() {
5508 IEM_MC_ADVANCE_RIP();
5509 } IEM_MC_ENDIF();
5510 IEM_MC_END();
5511 }
5512 else
5513 {
5514 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5516
5517 IEM_MC_BEGIN(0, 0);
5518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5519 IEM_MC_REL_JMP_S32(i32Imm);
5520 } IEM_MC_ELSE() {
5521 IEM_MC_ADVANCE_RIP();
5522 } IEM_MC_ENDIF();
5523 IEM_MC_END();
5524 }
5525 return VINF_SUCCESS;
5526}
5527
5528
5529/** Opcode 0x0f 0x81. */
5530FNIEMOP_DEF(iemOp_jno_Jv)
5531{
5532 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5533 IEMOP_HLP_MIN_386();
5534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5535 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5536 {
5537 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5539
5540 IEM_MC_BEGIN(0, 0);
5541 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5542 IEM_MC_ADVANCE_RIP();
5543 } IEM_MC_ELSE() {
5544 IEM_MC_REL_JMP_S16(i16Imm);
5545 } IEM_MC_ENDIF();
5546 IEM_MC_END();
5547 }
5548 else
5549 {
5550 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552
5553 IEM_MC_BEGIN(0, 0);
5554 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5555 IEM_MC_ADVANCE_RIP();
5556 } IEM_MC_ELSE() {
5557 IEM_MC_REL_JMP_S32(i32Imm);
5558 } IEM_MC_ENDIF();
5559 IEM_MC_END();
5560 }
5561 return VINF_SUCCESS;
5562}
5563
5564
5565/** Opcode 0x0f 0x82. */
5566FNIEMOP_DEF(iemOp_jc_Jv)
5567{
5568 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5569 IEMOP_HLP_MIN_386();
5570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5571 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5572 {
5573 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5575
5576 IEM_MC_BEGIN(0, 0);
5577 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5578 IEM_MC_REL_JMP_S16(i16Imm);
5579 } IEM_MC_ELSE() {
5580 IEM_MC_ADVANCE_RIP();
5581 } IEM_MC_ENDIF();
5582 IEM_MC_END();
5583 }
5584 else
5585 {
5586 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5588
5589 IEM_MC_BEGIN(0, 0);
5590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5591 IEM_MC_REL_JMP_S32(i32Imm);
5592 } IEM_MC_ELSE() {
5593 IEM_MC_ADVANCE_RIP();
5594 } IEM_MC_ENDIF();
5595 IEM_MC_END();
5596 }
5597 return VINF_SUCCESS;
5598}
5599
5600
5601/** Opcode 0x0f 0x83. */
5602FNIEMOP_DEF(iemOp_jnc_Jv)
5603{
5604 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5605 IEMOP_HLP_MIN_386();
5606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5607 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5608 {
5609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5611
5612 IEM_MC_BEGIN(0, 0);
5613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5614 IEM_MC_ADVANCE_RIP();
5615 } IEM_MC_ELSE() {
5616 IEM_MC_REL_JMP_S16(i16Imm);
5617 } IEM_MC_ENDIF();
5618 IEM_MC_END();
5619 }
5620 else
5621 {
5622 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624
5625 IEM_MC_BEGIN(0, 0);
5626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5627 IEM_MC_ADVANCE_RIP();
5628 } IEM_MC_ELSE() {
5629 IEM_MC_REL_JMP_S32(i32Imm);
5630 } IEM_MC_ENDIF();
5631 IEM_MC_END();
5632 }
5633 return VINF_SUCCESS;
5634}
5635
5636
5637/** Opcode 0x0f 0x84. */
5638FNIEMOP_DEF(iemOp_je_Jv)
5639{
5640 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5641 IEMOP_HLP_MIN_386();
5642 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5643 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5644 {
5645 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5647
5648 IEM_MC_BEGIN(0, 0);
5649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5650 IEM_MC_REL_JMP_S16(i16Imm);
5651 } IEM_MC_ELSE() {
5652 IEM_MC_ADVANCE_RIP();
5653 } IEM_MC_ENDIF();
5654 IEM_MC_END();
5655 }
5656 else
5657 {
5658 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660
5661 IEM_MC_BEGIN(0, 0);
5662 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5663 IEM_MC_REL_JMP_S32(i32Imm);
5664 } IEM_MC_ELSE() {
5665 IEM_MC_ADVANCE_RIP();
5666 } IEM_MC_ENDIF();
5667 IEM_MC_END();
5668 }
5669 return VINF_SUCCESS;
5670}
5671
5672
5673/** Opcode 0x0f 0x85. */
5674FNIEMOP_DEF(iemOp_jne_Jv)
5675{
5676 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5677 IEMOP_HLP_MIN_386();
5678 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5679 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5680 {
5681 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5683
5684 IEM_MC_BEGIN(0, 0);
5685 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5686 IEM_MC_ADVANCE_RIP();
5687 } IEM_MC_ELSE() {
5688 IEM_MC_REL_JMP_S16(i16Imm);
5689 } IEM_MC_ENDIF();
5690 IEM_MC_END();
5691 }
5692 else
5693 {
5694 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5696
5697 IEM_MC_BEGIN(0, 0);
5698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5699 IEM_MC_ADVANCE_RIP();
5700 } IEM_MC_ELSE() {
5701 IEM_MC_REL_JMP_S32(i32Imm);
5702 } IEM_MC_ENDIF();
5703 IEM_MC_END();
5704 }
5705 return VINF_SUCCESS;
5706}
5707
5708
5709/** Opcode 0x0f 0x86. */
5710FNIEMOP_DEF(iemOp_jbe_Jv)
5711{
5712 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5713 IEMOP_HLP_MIN_386();
5714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5716 {
5717 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719
5720 IEM_MC_BEGIN(0, 0);
5721 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5722 IEM_MC_REL_JMP_S16(i16Imm);
5723 } IEM_MC_ELSE() {
5724 IEM_MC_ADVANCE_RIP();
5725 } IEM_MC_ENDIF();
5726 IEM_MC_END();
5727 }
5728 else
5729 {
5730 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5732
5733 IEM_MC_BEGIN(0, 0);
5734 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5735 IEM_MC_REL_JMP_S32(i32Imm);
5736 } IEM_MC_ELSE() {
5737 IEM_MC_ADVANCE_RIP();
5738 } IEM_MC_ENDIF();
5739 IEM_MC_END();
5740 }
5741 return VINF_SUCCESS;
5742}
5743
5744
5745/** Opcode 0x0f 0x87. */
5746FNIEMOP_DEF(iemOp_jnbe_Jv)
5747{
5748 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5749 IEMOP_HLP_MIN_386();
5750 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5752 {
5753 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5755
5756 IEM_MC_BEGIN(0, 0);
5757 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5758 IEM_MC_ADVANCE_RIP();
5759 } IEM_MC_ELSE() {
5760 IEM_MC_REL_JMP_S16(i16Imm);
5761 } IEM_MC_ENDIF();
5762 IEM_MC_END();
5763 }
5764 else
5765 {
5766 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5768
5769 IEM_MC_BEGIN(0, 0);
5770 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5771 IEM_MC_ADVANCE_RIP();
5772 } IEM_MC_ELSE() {
5773 IEM_MC_REL_JMP_S32(i32Imm);
5774 } IEM_MC_ENDIF();
5775 IEM_MC_END();
5776 }
5777 return VINF_SUCCESS;
5778}
5779
5780
5781/** Opcode 0x0f 0x88. */
5782FNIEMOP_DEF(iemOp_js_Jv)
5783{
5784 IEMOP_MNEMONIC(js_Jv, "js Jv");
5785 IEMOP_HLP_MIN_386();
5786 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5787 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5788 {
5789 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5791
5792 IEM_MC_BEGIN(0, 0);
5793 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5794 IEM_MC_REL_JMP_S16(i16Imm);
5795 } IEM_MC_ELSE() {
5796 IEM_MC_ADVANCE_RIP();
5797 } IEM_MC_ENDIF();
5798 IEM_MC_END();
5799 }
5800 else
5801 {
5802 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5804
5805 IEM_MC_BEGIN(0, 0);
5806 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5807 IEM_MC_REL_JMP_S32(i32Imm);
5808 } IEM_MC_ELSE() {
5809 IEM_MC_ADVANCE_RIP();
5810 } IEM_MC_ENDIF();
5811 IEM_MC_END();
5812 }
5813 return VINF_SUCCESS;
5814}
5815
5816
5817/** Opcode 0x0f 0x89. */
5818FNIEMOP_DEF(iemOp_jns_Jv)
5819{
5820 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5821 IEMOP_HLP_MIN_386();
5822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5823 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5824 {
5825 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5827
5828 IEM_MC_BEGIN(0, 0);
5829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5830 IEM_MC_ADVANCE_RIP();
5831 } IEM_MC_ELSE() {
5832 IEM_MC_REL_JMP_S16(i16Imm);
5833 } IEM_MC_ENDIF();
5834 IEM_MC_END();
5835 }
5836 else
5837 {
5838 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5840
5841 IEM_MC_BEGIN(0, 0);
5842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5843 IEM_MC_ADVANCE_RIP();
5844 } IEM_MC_ELSE() {
5845 IEM_MC_REL_JMP_S32(i32Imm);
5846 } IEM_MC_ENDIF();
5847 IEM_MC_END();
5848 }
5849 return VINF_SUCCESS;
5850}
5851
5852
5853/** Opcode 0x0f 0x8a. */
5854FNIEMOP_DEF(iemOp_jp_Jv)
5855{
5856 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5857 IEMOP_HLP_MIN_386();
5858 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5859 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5860 {
5861 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863
5864 IEM_MC_BEGIN(0, 0);
5865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5866 IEM_MC_REL_JMP_S16(i16Imm);
5867 } IEM_MC_ELSE() {
5868 IEM_MC_ADVANCE_RIP();
5869 } IEM_MC_ENDIF();
5870 IEM_MC_END();
5871 }
5872 else
5873 {
5874 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876
5877 IEM_MC_BEGIN(0, 0);
5878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5879 IEM_MC_REL_JMP_S32(i32Imm);
5880 } IEM_MC_ELSE() {
5881 IEM_MC_ADVANCE_RIP();
5882 } IEM_MC_ENDIF();
5883 IEM_MC_END();
5884 }
5885 return VINF_SUCCESS;
5886}
5887
5888
5889/** Opcode 0x0f 0x8b. */
5890FNIEMOP_DEF(iemOp_jnp_Jv)
5891{
5892 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5893 IEMOP_HLP_MIN_386();
5894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5895 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5896 {
5897 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5899
5900 IEM_MC_BEGIN(0, 0);
5901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5902 IEM_MC_ADVANCE_RIP();
5903 } IEM_MC_ELSE() {
5904 IEM_MC_REL_JMP_S16(i16Imm);
5905 } IEM_MC_ENDIF();
5906 IEM_MC_END();
5907 }
5908 else
5909 {
5910 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912
5913 IEM_MC_BEGIN(0, 0);
5914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5915 IEM_MC_ADVANCE_RIP();
5916 } IEM_MC_ELSE() {
5917 IEM_MC_REL_JMP_S32(i32Imm);
5918 } IEM_MC_ENDIF();
5919 IEM_MC_END();
5920 }
5921 return VINF_SUCCESS;
5922}
5923
5924
5925/** Opcode 0x0f 0x8c. */
5926FNIEMOP_DEF(iemOp_jl_Jv)
5927{
5928 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5929 IEMOP_HLP_MIN_386();
5930 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5931 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5932 {
5933 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5935
5936 IEM_MC_BEGIN(0, 0);
5937 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5938 IEM_MC_REL_JMP_S16(i16Imm);
5939 } IEM_MC_ELSE() {
5940 IEM_MC_ADVANCE_RIP();
5941 } IEM_MC_ENDIF();
5942 IEM_MC_END();
5943 }
5944 else
5945 {
5946 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5948
5949 IEM_MC_BEGIN(0, 0);
5950 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5951 IEM_MC_REL_JMP_S32(i32Imm);
5952 } IEM_MC_ELSE() {
5953 IEM_MC_ADVANCE_RIP();
5954 } IEM_MC_ENDIF();
5955 IEM_MC_END();
5956 }
5957 return VINF_SUCCESS;
5958}
5959
5960
5961/** Opcode 0x0f 0x8d. */
5962FNIEMOP_DEF(iemOp_jnl_Jv)
5963{
5964 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5965 IEMOP_HLP_MIN_386();
5966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5968 {
5969 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971
5972 IEM_MC_BEGIN(0, 0);
5973 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5974 IEM_MC_ADVANCE_RIP();
5975 } IEM_MC_ELSE() {
5976 IEM_MC_REL_JMP_S16(i16Imm);
5977 } IEM_MC_ENDIF();
5978 IEM_MC_END();
5979 }
5980 else
5981 {
5982 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5984
5985 IEM_MC_BEGIN(0, 0);
5986 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5987 IEM_MC_ADVANCE_RIP();
5988 } IEM_MC_ELSE() {
5989 IEM_MC_REL_JMP_S32(i32Imm);
5990 } IEM_MC_ENDIF();
5991 IEM_MC_END();
5992 }
5993 return VINF_SUCCESS;
5994}
5995
5996
5997/** Opcode 0x0f 0x8e. */
5998FNIEMOP_DEF(iemOp_jle_Jv)
5999{
6000 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6001 IEMOP_HLP_MIN_386();
6002 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6003 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6004 {
6005 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6007
6008 IEM_MC_BEGIN(0, 0);
6009 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6010 IEM_MC_REL_JMP_S16(i16Imm);
6011 } IEM_MC_ELSE() {
6012 IEM_MC_ADVANCE_RIP();
6013 } IEM_MC_ENDIF();
6014 IEM_MC_END();
6015 }
6016 else
6017 {
6018 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6020
6021 IEM_MC_BEGIN(0, 0);
6022 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6023 IEM_MC_REL_JMP_S32(i32Imm);
6024 } IEM_MC_ELSE() {
6025 IEM_MC_ADVANCE_RIP();
6026 } IEM_MC_ENDIF();
6027 IEM_MC_END();
6028 }
6029 return VINF_SUCCESS;
6030}
6031
6032
6033/** Opcode 0x0f 0x8f. */
6034FNIEMOP_DEF(iemOp_jnle_Jv)
6035{
6036 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6037 IEMOP_HLP_MIN_386();
6038 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6039 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6040 {
6041 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6043
6044 IEM_MC_BEGIN(0, 0);
6045 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6046 IEM_MC_ADVANCE_RIP();
6047 } IEM_MC_ELSE() {
6048 IEM_MC_REL_JMP_S16(i16Imm);
6049 } IEM_MC_ENDIF();
6050 IEM_MC_END();
6051 }
6052 else
6053 {
6054 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6056
6057 IEM_MC_BEGIN(0, 0);
6058 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6059 IEM_MC_ADVANCE_RIP();
6060 } IEM_MC_ELSE() {
6061 IEM_MC_REL_JMP_S32(i32Imm);
6062 } IEM_MC_ENDIF();
6063 IEM_MC_END();
6064 }
6065 return VINF_SUCCESS;
6066}
6067
6068
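/*
 * The SETcc Eb handlers (0x0f 0x90 thru 0x9f) below reuse the sixteen Jcc
 * conditions above, but instead of branching they store a single byte into
 * the r/m operand: 1 when the condition holds, 0 otherwise.  Conceptually
 * (illustrative only): *pbDst = fConditionHolds ? 1 : 0;
 */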
6069/** Opcode 0x0f 0x90. */
6070FNIEMOP_DEF(iemOp_seto_Eb)
6071{
6072 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6073 IEMOP_HLP_MIN_386();
6074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6075
6076 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6077 * any way. AMD says it's "unused", whatever that means. We're
6078 * ignoring it for now. */
6079 if (IEM_IS_MODRM_REG_MODE(bRm))
6080 {
6081 /* register target */
6082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6083 IEM_MC_BEGIN(0, 0);
6084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6085 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6086 } IEM_MC_ELSE() {
6087 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6088 } IEM_MC_ENDIF();
6089 IEM_MC_ADVANCE_RIP();
6090 IEM_MC_END();
6091 }
6092 else
6093 {
6094 /* memory target */
6095 IEM_MC_BEGIN(0, 1);
6096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6100 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6101 } IEM_MC_ELSE() {
6102 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6103 } IEM_MC_ENDIF();
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 }
6107 return VINF_SUCCESS;
6108}
6109
6110
6111/** Opcode 0x0f 0x91. */
6112FNIEMOP_DEF(iemOp_setno_Eb)
6113{
6114 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6115 IEMOP_HLP_MIN_386();
6116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6117
6118 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6119 * any way. AMD says it's "unused", whatever that means. We're
6120 * ignoring it for now. */
6121 if (IEM_IS_MODRM_REG_MODE(bRm))
6122 {
6123 /* register target */
6124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6125 IEM_MC_BEGIN(0, 0);
6126 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6127 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6128 } IEM_MC_ELSE() {
6129 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6130 } IEM_MC_ENDIF();
6131 IEM_MC_ADVANCE_RIP();
6132 IEM_MC_END();
6133 }
6134 else
6135 {
6136 /* memory target */
6137 IEM_MC_BEGIN(0, 1);
6138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6142 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6143 } IEM_MC_ELSE() {
6144 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6145 } IEM_MC_ENDIF();
6146 IEM_MC_ADVANCE_RIP();
6147 IEM_MC_END();
6148 }
6149 return VINF_SUCCESS;
6150}
6151
6152
6153/** Opcode 0x0f 0x92. */
6154FNIEMOP_DEF(iemOp_setc_Eb)
6155{
6156 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6157 IEMOP_HLP_MIN_386();
6158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6159
6160 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6161 * any way. AMD says it's "unused", whatever that means. We're
6162 * ignoring it for now. */
6163 if (IEM_IS_MODRM_REG_MODE(bRm))
6164 {
6165 /* register target */
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6167 IEM_MC_BEGIN(0, 0);
6168 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6169 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6170 } IEM_MC_ELSE() {
6171 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6172 } IEM_MC_ENDIF();
6173 IEM_MC_ADVANCE_RIP();
6174 IEM_MC_END();
6175 }
6176 else
6177 {
6178 /* memory target */
6179 IEM_MC_BEGIN(0, 1);
6180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6183 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6184 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6185 } IEM_MC_ELSE() {
6186 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6187 } IEM_MC_ENDIF();
6188 IEM_MC_ADVANCE_RIP();
6189 IEM_MC_END();
6190 }
6191 return VINF_SUCCESS;
6192}
6193
6194
6195/** Opcode 0x0f 0x93. */
6196FNIEMOP_DEF(iemOp_setnc_Eb)
6197{
6198 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6199 IEMOP_HLP_MIN_386();
6200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6201
6202 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6203 * any way. AMD says it's "unused", whatever that means. We're
6204 * ignoring it for now. */
6205 if (IEM_IS_MODRM_REG_MODE(bRm))
6206 {
6207 /* register target */
6208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6209 IEM_MC_BEGIN(0, 0);
6210 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6211 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6212 } IEM_MC_ELSE() {
6213 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6214 } IEM_MC_ENDIF();
6215 IEM_MC_ADVANCE_RIP();
6216 IEM_MC_END();
6217 }
6218 else
6219 {
6220 /* memory target */
6221 IEM_MC_BEGIN(0, 1);
6222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6226 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6227 } IEM_MC_ELSE() {
6228 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6229 } IEM_MC_ENDIF();
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 }
6233 return VINF_SUCCESS;
6234}
6235
6236
6237/** Opcode 0x0f 0x94. */
6238FNIEMOP_DEF(iemOp_sete_Eb)
6239{
6240 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6241 IEMOP_HLP_MIN_386();
6242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6243
6244 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6245 * any way. AMD says it's "unused", whatever that means. We're
6246 * ignoring it for now. */
6247 if (IEM_IS_MODRM_REG_MODE(bRm))
6248 {
6249 /* register target */
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251 IEM_MC_BEGIN(0, 0);
6252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6253 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6254 } IEM_MC_ELSE() {
6255 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6256 } IEM_MC_ENDIF();
6257 IEM_MC_ADVANCE_RIP();
6258 IEM_MC_END();
6259 }
6260 else
6261 {
6262 /* memory target */
6263 IEM_MC_BEGIN(0, 1);
6264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6269 } IEM_MC_ELSE() {
6270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6271 } IEM_MC_ENDIF();
6272 IEM_MC_ADVANCE_RIP();
6273 IEM_MC_END();
6274 }
6275 return VINF_SUCCESS;
6276}
6277
6278
6279/** Opcode 0x0f 0x95. */
6280FNIEMOP_DEF(iemOp_setne_Eb)
6281{
6282 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6283 IEMOP_HLP_MIN_386();
6284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6285
6286 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6287 * any way. AMD says it's "unused", whatever that means. We're
6288 * ignoring it for now. */
6289 if (IEM_IS_MODRM_REG_MODE(bRm))
6290 {
6291 /* register target */
6292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6293 IEM_MC_BEGIN(0, 0);
6294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6295 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6296 } IEM_MC_ELSE() {
6297 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6298 } IEM_MC_ENDIF();
6299 IEM_MC_ADVANCE_RIP();
6300 IEM_MC_END();
6301 }
6302 else
6303 {
6304 /* memory target */
6305 IEM_MC_BEGIN(0, 1);
6306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6310 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6311 } IEM_MC_ELSE() {
6312 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6313 } IEM_MC_ENDIF();
6314 IEM_MC_ADVANCE_RIP();
6315 IEM_MC_END();
6316 }
6317 return VINF_SUCCESS;
6318}
6319
6320
6321/** Opcode 0x0f 0x96. */
6322FNIEMOP_DEF(iemOp_setbe_Eb)
6323{
6324 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6325 IEMOP_HLP_MIN_386();
6326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6327
6328 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6329 * any way. AMD says it's "unused", whatever that means. We're
6330 * ignoring it for now. */
6331 if (IEM_IS_MODRM_REG_MODE(bRm))
6332 {
6333 /* register target */
6334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6335 IEM_MC_BEGIN(0, 0);
6336 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6337 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6338 } IEM_MC_ELSE() {
6339 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6340 } IEM_MC_ENDIF();
6341 IEM_MC_ADVANCE_RIP();
6342 IEM_MC_END();
6343 }
6344 else
6345 {
6346 /* memory target */
6347 IEM_MC_BEGIN(0, 1);
6348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6353 } IEM_MC_ELSE() {
6354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6355 } IEM_MC_ENDIF();
6356 IEM_MC_ADVANCE_RIP();
6357 IEM_MC_END();
6358 }
6359 return VINF_SUCCESS;
6360}
6361
6362
6363/** Opcode 0x0f 0x97. */
6364FNIEMOP_DEF(iemOp_setnbe_Eb)
6365{
6366 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6367 IEMOP_HLP_MIN_386();
6368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6369
6370 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6371 * any way. AMD says it's "unused", whatever that means. We're
6372 * ignoring it for now. */
6373 if (IEM_IS_MODRM_REG_MODE(bRm))
6374 {
6375 /* register target */
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377 IEM_MC_BEGIN(0, 0);
6378 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6379 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6380 } IEM_MC_ELSE() {
6381 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6382 } IEM_MC_ENDIF();
6383 IEM_MC_ADVANCE_RIP();
6384 IEM_MC_END();
6385 }
6386 else
6387 {
6388 /* memory target */
6389 IEM_MC_BEGIN(0, 1);
6390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6393 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6394 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6395 } IEM_MC_ELSE() {
6396 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6397 } IEM_MC_ENDIF();
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 }
6401 return VINF_SUCCESS;
6402}
6403
6404
6405/** Opcode 0x0f 0x98. */
6406FNIEMOP_DEF(iemOp_sets_Eb)
6407{
6408 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6409 IEMOP_HLP_MIN_386();
6410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6411
6412 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6413 * any way. AMD says it's "unused", whatever that means. We're
6414 * ignoring it for now. */
6415 if (IEM_IS_MODRM_REG_MODE(bRm))
6416 {
6417 /* register target */
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_BEGIN(0, 0);
6420 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6421 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6422 } IEM_MC_ELSE() {
6423 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6424 } IEM_MC_ENDIF();
6425 IEM_MC_ADVANCE_RIP();
6426 IEM_MC_END();
6427 }
6428 else
6429 {
6430 /* memory target */
6431 IEM_MC_BEGIN(0, 1);
6432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6437 } IEM_MC_ELSE() {
6438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6439 } IEM_MC_ENDIF();
6440 IEM_MC_ADVANCE_RIP();
6441 IEM_MC_END();
6442 }
6443 return VINF_SUCCESS;
6444}
6445
6446
6447/** Opcode 0x0f 0x99. */
6448FNIEMOP_DEF(iemOp_setns_Eb)
6449{
6450 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6451 IEMOP_HLP_MIN_386();
6452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6453
6454 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6455 * any way. AMD says it's "unused", whatever that means. We're
6456 * ignoring it for now. */
6457 if (IEM_IS_MODRM_REG_MODE(bRm))
6458 {
6459 /* register target */
6460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6461 IEM_MC_BEGIN(0, 0);
6462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6463 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6464 } IEM_MC_ELSE() {
6465 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6466 } IEM_MC_ENDIF();
6467 IEM_MC_ADVANCE_RIP();
6468 IEM_MC_END();
6469 }
6470 else
6471 {
6472 /* memory target */
6473 IEM_MC_BEGIN(0, 1);
6474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6479 } IEM_MC_ELSE() {
6480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6481 } IEM_MC_ENDIF();
6482 IEM_MC_ADVANCE_RIP();
6483 IEM_MC_END();
6484 }
6485 return VINF_SUCCESS;
6486}
6487
6488
6489/** Opcode 0x0f 0x9a. */
6490FNIEMOP_DEF(iemOp_setp_Eb)
6491{
6492 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6493 IEMOP_HLP_MIN_386();
6494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6495
6496 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6497 * any way. AMD says it's "unused", whatever that means. We're
6498 * ignoring it for now. */
6499 if (IEM_IS_MODRM_REG_MODE(bRm))
6500 {
6501 /* register target */
6502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6503 IEM_MC_BEGIN(0, 0);
6504 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6505 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6506 } IEM_MC_ELSE() {
6507 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6508 } IEM_MC_ENDIF();
6509 IEM_MC_ADVANCE_RIP();
6510 IEM_MC_END();
6511 }
6512 else
6513 {
6514 /* memory target */
6515 IEM_MC_BEGIN(0, 1);
6516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6521 } IEM_MC_ELSE() {
6522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6523 } IEM_MC_ENDIF();
6524 IEM_MC_ADVANCE_RIP();
6525 IEM_MC_END();
6526 }
6527 return VINF_SUCCESS;
6528}
6529
6530
6531/** Opcode 0x0f 0x9b. */
6532FNIEMOP_DEF(iemOp_setnp_Eb)
6533{
6534 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6535 IEMOP_HLP_MIN_386();
6536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6537
6538 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6539 * any way. AMD says it's "unused", whatever that means. We're
6540 * ignoring it for now. */
6541 if (IEM_IS_MODRM_REG_MODE(bRm))
6542 {
6543 /* register target */
6544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545 IEM_MC_BEGIN(0, 0);
6546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6547 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6548 } IEM_MC_ELSE() {
6549 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6550 } IEM_MC_ENDIF();
6551 IEM_MC_ADVANCE_RIP();
6552 IEM_MC_END();
6553 }
6554 else
6555 {
6556 /* memory target */
6557 IEM_MC_BEGIN(0, 1);
6558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6563 } IEM_MC_ELSE() {
6564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6565 } IEM_MC_ENDIF();
6566 IEM_MC_ADVANCE_RIP();
6567 IEM_MC_END();
6568 }
6569 return VINF_SUCCESS;
6570}
6571
6572
6573/** Opcode 0x0f 0x9c. */
6574FNIEMOP_DEF(iemOp_setl_Eb)
6575{
6576 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6577 IEMOP_HLP_MIN_386();
6578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6579
6580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6581 * any way. AMD says it's "unused", whatever that means. We're
6582 * ignoring it for now. */
6583 if (IEM_IS_MODRM_REG_MODE(bRm))
6584 {
6585 /* register target */
6586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6587 IEM_MC_BEGIN(0, 0);
6588 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6589 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6590 } IEM_MC_ELSE() {
6591 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6592 } IEM_MC_ENDIF();
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 }
6596 else
6597 {
6598 /* memory target */
6599 IEM_MC_BEGIN(0, 1);
6600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6603 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6605 } IEM_MC_ELSE() {
6606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6607 } IEM_MC_ENDIF();
6608 IEM_MC_ADVANCE_RIP();
6609 IEM_MC_END();
6610 }
6611 return VINF_SUCCESS;
6612}
6613
6614
6615/** Opcode 0x0f 0x9d. */
6616FNIEMOP_DEF(iemOp_setnl_Eb)
6617{
6618 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6619 IEMOP_HLP_MIN_386();
6620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6621
6622 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6623 * any way. AMD says it's "unused", whatever that means. We're
6624 * ignoring it for now. */
6625 if (IEM_IS_MODRM_REG_MODE(bRm))
6626 {
6627 /* register target */
6628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6629 IEM_MC_BEGIN(0, 0);
6630 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6631 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6632 } IEM_MC_ELSE() {
6633 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6634 } IEM_MC_ENDIF();
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 }
6638 else
6639 {
6640 /* memory target */
6641 IEM_MC_BEGIN(0, 1);
6642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6645 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6646 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6647 } IEM_MC_ELSE() {
6648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6649 } IEM_MC_ENDIF();
6650 IEM_MC_ADVANCE_RIP();
6651 IEM_MC_END();
6652 }
6653 return VINF_SUCCESS;
6654}
6655
6656
6657/** Opcode 0x0f 0x9e. */
6658FNIEMOP_DEF(iemOp_setle_Eb)
6659{
6660 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6661 IEMOP_HLP_MIN_386();
6662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6663
6664 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6665 * any way. AMD says it's "unused", whatever that means. We're
6666 * ignoring it for now. */
6667 if (IEM_IS_MODRM_REG_MODE(bRm))
6668 {
6669 /* register target */
6670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6671 IEM_MC_BEGIN(0, 0);
6672 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6673 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6674 } IEM_MC_ELSE() {
6675 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6676 } IEM_MC_ENDIF();
6677 IEM_MC_ADVANCE_RIP();
6678 IEM_MC_END();
6679 }
6680 else
6681 {
6682 /* memory target */
6683 IEM_MC_BEGIN(0, 1);
6684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6687 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6688 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6689 } IEM_MC_ELSE() {
6690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6691 } IEM_MC_ENDIF();
6692 IEM_MC_ADVANCE_RIP();
6693 IEM_MC_END();
6694 }
6695 return VINF_SUCCESS;
6696}
6697
6698
6699/** Opcode 0x0f 0x9f. */
6700FNIEMOP_DEF(iemOp_setnle_Eb)
6701{
6702 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6703 IEMOP_HLP_MIN_386();
6704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6705
6706 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6707 * any way. AMD says it's "unused", whatever that means. We're
6708 * ignoring it for now. */
6709 if (IEM_IS_MODRM_REG_MODE(bRm))
6710 {
6711 /* register target */
6712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6713 IEM_MC_BEGIN(0, 0);
6714 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6715 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6716 } IEM_MC_ELSE() {
6717 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6718 } IEM_MC_ENDIF();
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 }
6722 else
6723 {
6724 /* memory target */
6725 IEM_MC_BEGIN(0, 1);
6726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6730 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6731 } IEM_MC_ELSE() {
6732 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6733 } IEM_MC_ENDIF();
6734 IEM_MC_ADVANCE_RIP();
6735 IEM_MC_END();
6736 }
6737 return VINF_SUCCESS;
6738}
6739
6740
6741/**
6742 * Common 'push segment-register' helper.
6743 */
6744FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6745{
6746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6747 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only push fs/gs (0x0f encodings) are valid in 64-bit mode */
6748 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6749
6750 switch (pVCpu->iem.s.enmEffOpSize)
6751 {
6752 case IEMMODE_16BIT:
6753 IEM_MC_BEGIN(0, 1);
6754 IEM_MC_LOCAL(uint16_t, u16Value);
6755 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6756 IEM_MC_PUSH_U16(u16Value);
6757 IEM_MC_ADVANCE_RIP();
6758 IEM_MC_END();
6759 break;
6760
6761 case IEMMODE_32BIT:
6762 IEM_MC_BEGIN(0, 1);
6763 IEM_MC_LOCAL(uint32_t, u32Value);
6764 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
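            /* A dedicated SREG push micro-op is used below because real
               CPUs may do only a word-sized write for a 32-bit push of a
               segment register, leaving the upper bytes of the stack slot
               untouched; a plain IEM_MC_PUSH_U32 would always write all
               four bytes. */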
6765 IEM_MC_PUSH_U32_SREG(u32Value);
6766 IEM_MC_ADVANCE_RIP();
6767 IEM_MC_END();
6768 break;
6769
6770 case IEMMODE_64BIT:
6771 IEM_MC_BEGIN(0, 1);
6772 IEM_MC_LOCAL(uint64_t, u64Value);
6773 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6774 IEM_MC_PUSH_U64(u64Value);
6775 IEM_MC_ADVANCE_RIP();
6776 IEM_MC_END();
6777 break;
6778 }
6779
6780 return VINF_SUCCESS;
6781}
6782
6783
6784/** Opcode 0x0f 0xa0. */
6785FNIEMOP_DEF(iemOp_push_fs)
6786{
6787 IEMOP_MNEMONIC(push_fs, "push fs");
6788 IEMOP_HLP_MIN_386();
6789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6790 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6791}
6792
6793
6794/** Opcode 0x0f 0xa1. */
6795FNIEMOP_DEF(iemOp_pop_fs)
6796{
6797 IEMOP_MNEMONIC(pop_fs, "pop fs");
6798 IEMOP_HLP_MIN_386();
6799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6800 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6801}
6802
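/*
 * (Popping into a segment register involves descriptor-table reads and
 * privilege checks, so pop fs -- and pop gs further down -- defer to
 * iemCImpl_pop_Sreg instead of being expressed as micro-ops.)
 */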
6803
6804/** Opcode 0x0f 0xa2. */
6805FNIEMOP_DEF(iemOp_cpuid)
6806{
6807 IEMOP_MNEMONIC(cpuid, "cpuid");
6808 IEMOP_HLP_MIN_486(); /* not on all 486 models. */
6809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6810 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6811}
6812
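/*
 * (CPUID consumes the leaf in EAX -- and on later CPUs the sub-leaf in
 * ECX -- and overwrites EAX thru EDX; all of that is handled by
 * iemCImpl_cpuid, the decoder above only rejects lock prefixes.)
 */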
6813
6814/**
6815 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6816 * iemOp_bts_Ev_Gv.
6817 */
6818FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6819{
6820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6821 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6822
6823 if (IEM_IS_MODRM_REG_MODE(bRm))
6824 {
6825 /* register destination. */
6826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6827 switch (pVCpu->iem.s.enmEffOpSize)
6828 {
6829 case IEMMODE_16BIT:
6830 IEM_MC_BEGIN(3, 0);
6831 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6832 IEM_MC_ARG(uint16_t, u16Src, 1);
6833 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6834
6835 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6836 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6838 IEM_MC_REF_EFLAGS(pEFlags);
6839 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6840
6841 IEM_MC_ADVANCE_RIP();
6842 IEM_MC_END();
6843 return VINF_SUCCESS;
6844
6845 case IEMMODE_32BIT:
6846 IEM_MC_BEGIN(3, 0);
6847 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6848 IEM_MC_ARG(uint32_t, u32Src, 1);
6849 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6850
6851 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6852 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6853 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6854 IEM_MC_REF_EFLAGS(pEFlags);
6855 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6856
6857 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6858 IEM_MC_ADVANCE_RIP();
6859 IEM_MC_END();
6860 return VINF_SUCCESS;
6861
6862 case IEMMODE_64BIT:
6863 IEM_MC_BEGIN(3, 0);
6864 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6865 IEM_MC_ARG(uint64_t, u64Src, 1);
6866 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6867
6868 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6869 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6870 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6871 IEM_MC_REF_EFLAGS(pEFlags);
6872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6873
6874 IEM_MC_ADVANCE_RIP();
6875 IEM_MC_END();
6876 return VINF_SUCCESS;
6877
6878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6879 }
6880 }
6881 else
6882 {
6883 /* memory destination. */
6884
6885 uint32_t fAccess;
6886 if (pImpl->pfnLockedU16)
6887 fAccess = IEM_ACCESS_DATA_RW;
6888 else /* BT */
6889 fAccess = IEM_ACCESS_DATA_R;
6890
6891 /** @todo test negative bit offsets! */
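        /*
         * For a memory destination the bit offset in the source register is
         * a signed value that may reach outside the addressed unit, so each
         * case below splits it up: the low 4/5/6 bits select the bit inside
         * the word/dword/qword, while the arithmetically shifted remainder,
         * scaled by the unit size, is folded into the effective address.
         * For the 16-bit case that is roughly (illustrative names):
         *      GCPtrEffDst += (iBitOffset >> 4) * 2;  // word granularity
         *      iBitOffset  &= 15;                     // bit within the word
         */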
6892 switch (pVCpu->iem.s.enmEffOpSize)
6893 {
6894 case IEMMODE_16BIT:
6895 IEM_MC_BEGIN(3, 2);
6896 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6897 IEM_MC_ARG(uint16_t, u16Src, 1);
6898 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6900 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6901
6902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6903 if (pImpl->pfnLockedU16)
6904 IEMOP_HLP_DONE_DECODING();
6905 else
6906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6907 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6908 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6909 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6910 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6911 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6912 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6913 IEM_MC_FETCH_EFLAGS(EFlags);
6914
6915 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6916 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6918 else
6919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6920 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6921
6922 IEM_MC_COMMIT_EFLAGS(EFlags);
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 case IEMMODE_32BIT:
6928 IEM_MC_BEGIN(3, 2);
6929 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6930 IEM_MC_ARG(uint32_t, u32Src, 1);
6931 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6933 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6934
6935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6936 if (pImpl->pfnLockedU16)
6937 IEMOP_HLP_DONE_DECODING();
6938 else
6939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6940 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6941 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6942 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6943 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6944 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6945 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6946 IEM_MC_FETCH_EFLAGS(EFlags);
6947
6948 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6949 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6950 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6951 else
6952 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6953 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6954
6955 IEM_MC_COMMIT_EFLAGS(EFlags);
6956 IEM_MC_ADVANCE_RIP();
6957 IEM_MC_END();
6958 return VINF_SUCCESS;
6959
6960 case IEMMODE_64BIT:
6961 IEM_MC_BEGIN(3, 2);
6962 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6963 IEM_MC_ARG(uint64_t, u64Src, 1);
6964 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6966 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6967
6968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6969 if (pImpl->pfnLockedU16)
6970 IEMOP_HLP_DONE_DECODING();
6971 else
6972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6973 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6974 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6975 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6976 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6977 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6978 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6979 IEM_MC_FETCH_EFLAGS(EFlags);
6980
6981 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6982 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6983 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6984 else
6985 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6986 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6987
6988 IEM_MC_COMMIT_EFLAGS(EFlags);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 return VINF_SUCCESS;
6992
6993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6994 }
6995 }
6996}
6997
6998
6999/** Opcode 0x0f 0xa3. */
7000FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7001{
7002 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7003 IEMOP_HLP_MIN_386();
7004 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7005}
7006
7007
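/*
 * SHLD/SHRD notes for the two workers below: the destination is shifted by
 * the count (Ib or CL) masked to the operand width (mod 32, or mod 64 with
 * REX.W), and the vacated bits are filled from the source register, e.g.
 * for SHLD (illustrative): uDst = (uDst << c) | (uSrc >> (cBits - c)).
 * For 16-bit operands a masked count >= 16 is undefined on real hardware,
 * and the EFLAGS outcome differs between CPU vendors, which is why the
 * opcode handlers pick a worker via IEMTARGETCPU_EFL_BEHAVIOR_SELECT.
 */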
7008/**
7009 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7010 */
7011FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7012{
7013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7014 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7015
7016 if (IEM_IS_MODRM_REG_MODE(bRm))
7017 {
7018 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7020
7021 switch (pVCpu->iem.s.enmEffOpSize)
7022 {
7023 case IEMMODE_16BIT:
7024 IEM_MC_BEGIN(4, 0);
7025 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7026 IEM_MC_ARG(uint16_t, u16Src, 1);
7027 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7028 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7029
7030 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7031 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7032 IEM_MC_REF_EFLAGS(pEFlags);
7033 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7034
7035 IEM_MC_ADVANCE_RIP();
7036 IEM_MC_END();
7037 return VINF_SUCCESS;
7038
7039 case IEMMODE_32BIT:
7040 IEM_MC_BEGIN(4, 0);
7041 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7042 IEM_MC_ARG(uint32_t, u32Src, 1);
7043 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7044 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7045
7046 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7047 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7048 IEM_MC_REF_EFLAGS(pEFlags);
7049 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7050
7051 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7052 IEM_MC_ADVANCE_RIP();
7053 IEM_MC_END();
7054 return VINF_SUCCESS;
7055
7056 case IEMMODE_64BIT:
7057 IEM_MC_BEGIN(4, 0);
7058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7059 IEM_MC_ARG(uint64_t, u64Src, 1);
7060 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7061 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7062
7063 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7064 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7065 IEM_MC_REF_EFLAGS(pEFlags);
7066 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7067
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 return VINF_SUCCESS;
7071
7072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7073 }
7074 }
7075 else
7076 {
7077 switch (pVCpu->iem.s.enmEffOpSize)
7078 {
7079 case IEMMODE_16BIT:
7080 IEM_MC_BEGIN(4, 2);
7081 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7082 IEM_MC_ARG(uint16_t, u16Src, 1);
7083 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7084 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7086
7087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7088 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7089 IEM_MC_ASSIGN(cShiftArg, cShift);
7090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7091 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7092 IEM_MC_FETCH_EFLAGS(EFlags);
7093 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7094 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7095
7096 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7097 IEM_MC_COMMIT_EFLAGS(EFlags);
7098 IEM_MC_ADVANCE_RIP();
7099 IEM_MC_END();
7100 return VINF_SUCCESS;
7101
7102 case IEMMODE_32BIT:
7103 IEM_MC_BEGIN(4, 2);
7104 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7105 IEM_MC_ARG(uint32_t, u32Src, 1);
7106 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7107 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7109
7110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7111 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7112 IEM_MC_ASSIGN(cShiftArg, cShift);
7113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7114 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7115 IEM_MC_FETCH_EFLAGS(EFlags);
7116 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7117 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7118
7119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7120 IEM_MC_COMMIT_EFLAGS(EFlags);
7121 IEM_MC_ADVANCE_RIP();
7122 IEM_MC_END();
7123 return VINF_SUCCESS;
7124
7125 case IEMMODE_64BIT:
7126 IEM_MC_BEGIN(4, 2);
7127 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7128 IEM_MC_ARG(uint64_t, u64Src, 1);
7129 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7130 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7132
7133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7134 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7135 IEM_MC_ASSIGN(cShiftArg, cShift);
7136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7137 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7138 IEM_MC_FETCH_EFLAGS(EFlags);
7139 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7140 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7141
7142 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7143 IEM_MC_COMMIT_EFLAGS(EFlags);
7144 IEM_MC_ADVANCE_RIP();
7145 IEM_MC_END();
7146 return VINF_SUCCESS;
7147
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150 }
7151}
7152
7153
7154/**
7155 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7156 */
7157FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7158{
7159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7160 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7161
7162 if (IEM_IS_MODRM_REG_MODE(bRm))
7163 {
7164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7165
7166 switch (pVCpu->iem.s.enmEffOpSize)
7167 {
7168 case IEMMODE_16BIT:
7169 IEM_MC_BEGIN(4, 0);
7170 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7171 IEM_MC_ARG(uint16_t, u16Src, 1);
7172 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7173 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7174
7175 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7176 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7177 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 return VINF_SUCCESS;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(4, 0);
7187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7188 IEM_MC_ARG(uint32_t, u32Src, 1);
7189 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7190 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7191
7192 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7193 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7194 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7195 IEM_MC_REF_EFLAGS(pEFlags);
7196 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7197
7198 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7199 IEM_MC_ADVANCE_RIP();
7200 IEM_MC_END();
7201 return VINF_SUCCESS;
7202
7203 case IEMMODE_64BIT:
7204 IEM_MC_BEGIN(4, 0);
7205 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7206 IEM_MC_ARG(uint64_t, u64Src, 1);
7207 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7208 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7209
7210 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7211 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7212 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7213 IEM_MC_REF_EFLAGS(pEFlags);
7214 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7215
7216 IEM_MC_ADVANCE_RIP();
7217 IEM_MC_END();
7218 return VINF_SUCCESS;
7219
7220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7221 }
7222 }
7223 else
7224 {
7225 switch (pVCpu->iem.s.enmEffOpSize)
7226 {
7227 case IEMMODE_16BIT:
7228 IEM_MC_BEGIN(4, 2);
7229 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7230 IEM_MC_ARG(uint16_t, u16Src, 1);
7231 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7232 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7234
7235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7237 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7238 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7239 IEM_MC_FETCH_EFLAGS(EFlags);
7240 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7241 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7242
7243 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7244 IEM_MC_COMMIT_EFLAGS(EFlags);
7245 IEM_MC_ADVANCE_RIP();
7246 IEM_MC_END();
7247 return VINF_SUCCESS;
7248
7249 case IEMMODE_32BIT:
7250 IEM_MC_BEGIN(4, 2);
7251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7252 IEM_MC_ARG(uint32_t, u32Src, 1);
7253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7256
7257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7259 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7261 IEM_MC_FETCH_EFLAGS(EFlags);
7262 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7264
7265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7266 IEM_MC_COMMIT_EFLAGS(EFlags);
7267 IEM_MC_ADVANCE_RIP();
7268 IEM_MC_END();
7269 return VINF_SUCCESS;
7270
7271 case IEMMODE_64BIT:
7272 IEM_MC_BEGIN(4, 2);
7273 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7274 IEM_MC_ARG(uint64_t, u64Src, 1);
7275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7278
7279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7281 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7283 IEM_MC_FETCH_EFLAGS(EFlags);
7284 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7286
7287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7288 IEM_MC_COMMIT_EFLAGS(EFlags);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 return VINF_SUCCESS;
7292
7293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7294 }
7295 }
7296}
7297
7298
7299
7300/** Opcode 0x0f 0xa4. */
7301FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7302{
7303 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7304 IEMOP_HLP_MIN_386();
7305 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7306}
7307
7308
7309/** Opcode 0x0f 0xa5. */
7310FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7311{
7312 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7313 IEMOP_HLP_MIN_386();
7314 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7315}
7316
7317
7318/** Opcode 0x0f 0xa8. */
7319FNIEMOP_DEF(iemOp_push_gs)
7320{
7321 IEMOP_MNEMONIC(push_gs, "push gs");
7322 IEMOP_HLP_MIN_386();
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7325}
7326
7327
7328/** Opcode 0x0f 0xa9. */
7329FNIEMOP_DEF(iemOp_pop_gs)
7330{
7331 IEMOP_MNEMONIC(pop_gs, "pop gs");
7332 IEMOP_HLP_MIN_386();
7333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7334 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7335}
7336
7337
7338/** Opcode 0x0f 0xaa. */
7339FNIEMOP_DEF(iemOp_rsm)
7340{
7341 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7342 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7344 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7345}
7346
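/*
 * (RSM is only meaningful when leaving system-management mode; outside of
 * SMM it raises #UD, which is sorted out by iemCImpl_rsm.)
 */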
7347
7348
7349/** Opcode 0x0f 0xab. */
7350FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7351{
7352 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7353 IEMOP_HLP_MIN_386();
7354 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7355}
7356
7357
7358/** Opcode 0x0f 0xac. */
7359FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7360{
7361 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7362 IEMOP_HLP_MIN_386();
7363 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7364}
7365
7366
7367/** Opcode 0x0f 0xad. */
7368FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7369{
7370 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7371 IEMOP_HLP_MIN_386();
7372 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7373}
7374
7375
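/*
 * The 0x0f 0xae (grp15) memory forms below follow a common pattern: gate
 * on the relevant CPUID feature, compute the effective address, actualize
 * the FPU/SSE state and defer to a C implementation.  For FXSAVE/FXRSTOR
 * that worker moves the full 512-byte FXSAVE image; the 16-byte alignment
 * fault (#GP) is left to the worker rather than the decoder.
 */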
7376/** Opcode 0x0f 0xae mem/0. */
7377FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7378{
7379 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7380 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7381 return IEMOP_RAISE_INVALID_OPCODE();
7382
7383 IEM_MC_BEGIN(3, 1);
7384 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7385 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7386 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7389 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7390 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7391 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7392 IEM_MC_END();
7393 return VINF_SUCCESS;
7394}
7395
7396
7397/** Opcode 0x0f 0xae mem/1. */
7398FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7399{
7400 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7401 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7402 return IEMOP_RAISE_INVALID_OPCODE();
7403
7404 IEM_MC_BEGIN(3, 1);
7405 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7406 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7407 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7410 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7411 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7412 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7413 IEM_MC_END();
7414 return VINF_SUCCESS;
7415}
7416
7417
7418/**
7419 * @opmaps grp15
7420 * @opcode !11/2
7421 * @oppfx none
7422 * @opcpuid sse
7423 * @opgroup og_sse_mxcsrsm
7424 * @opxcpttype 5
7425 * @optest op1=0 -> mxcsr=0
7426 * @optest op1=0x2083 -> mxcsr=0x2083
7427 * @optest op1=0xfffffffe -> value.xcpt=0xd
7428 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7429 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7430 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7431 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7432 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7433 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7434 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7435 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7436 */
7437FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7438{
7439 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7440 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7441 return IEMOP_RAISE_INVALID_OPCODE();
7442
7443 IEM_MC_BEGIN(2, 0);
7444 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7445 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7449 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7450 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7451 IEM_MC_END();
7452 return VINF_SUCCESS;
7453}
7454
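/*
 * (The @optest rows on LDMXCSR above and STMXCSR below encode the MXCSR
 * corner cases: writing any reserved bit, e.g. 0xfffffffe, must raise #GP
 * (0xd), while CR0.TS/CR0.EM and CR4.OSFXSR misconfigurations yield
 * #NM (0x7) or #UD (0x6) respectively.)
 */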
7455
7456/**
7457 * @opmaps grp15
7458 * @opcode !11/3
7459 * @oppfx none
7460 * @opcpuid sse
7461 * @opgroup og_sse_mxcsrsm
7462 * @opxcpttype 5
7463 * @optest mxcsr=0 -> op1=0
7464 * @optest mxcsr=0x2083 -> op1=0x2083
7465 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7466 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7467 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7468 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7469 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7470 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7471 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7472 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7473 */
7474FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7475{
7476 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7478 return IEMOP_RAISE_INVALID_OPCODE();
7479
7480 IEM_MC_BEGIN(2, 0);
7481 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7482 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7486 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7487 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7488 IEM_MC_END();
7489 return VINF_SUCCESS;
7490}
7491
7492
7493/**
7494 * @opmaps grp15
7495 * @opcode !11/4
7496 * @oppfx none
7497 * @opcpuid xsave
7498 * @opgroup og_system
7499 * @opxcpttype none
7500 */
7501FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7502{
7503 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7504 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7505 return IEMOP_RAISE_INVALID_OPCODE();
7506
7507 IEM_MC_BEGIN(3, 0);
7508 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7509 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7510 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7513 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7514 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7515 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7516 IEM_MC_END();
7517 return VINF_SUCCESS;
7518}
7519
7520
7521/**
7522 * @opmaps grp15
7523 * @opcode !11/5
7524 * @oppfx none
7525 * @opcpuid xsave
7526 * @opgroup og_system
7527 * @opxcpttype none
7528 */
7529FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7530{
7531 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7532 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7533 return IEMOP_RAISE_INVALID_OPCODE();
7534
7535 IEM_MC_BEGIN(3, 0);
7536 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7537 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7538 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7541 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7542 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7543 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7544 IEM_MC_END();
7545 return VINF_SUCCESS;
7546}
7547
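/*
 * Unlike the fixed 512-byte fxsave image, the xsave area is variable sized:
 * edx:eax ANDed with XCR0 yields the requested-feature bitmap and only the
 * selected state components are saved/restored. A minimal sketch of the
 * component selection (an assumption; the real logic lives in
 * iemCImpl_xsave/iemCImpl_xrstor):
 *
 *      uint64_t fReqComponents = RT_MAKE_U64(uEax, uEdx) & uGuestXcr0;
 *      if (fReqComponents & XSAVE_C_X87)   // component 0: x87 state
 *          ...
 *      if (fReqComponents & XSAVE_C_SSE)   // component 1: MXCSR + XMM regs
 *          ...
 */
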
7548/** Opcode 0x0f 0xae mem/6. */
7549FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7550
7551/**
7552 * @opmaps grp15
7553 * @opcode !11/7
7554 * @oppfx none
7555 * @opcpuid clfsh
7556 * @opgroup og_cachectl
7557 * @optest op1=1 ->
7558 */
7559FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7560{
7561 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7562 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7563 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7564
7565 IEM_MC_BEGIN(2, 0);
7566 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7567 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7570 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7571 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7572 IEM_MC_END();
7573 return VINF_SUCCESS;
7574}
7575
7576/**
7577 * @opmaps grp15
7578 * @opcode !11/7
7579 * @oppfx 0x66
7580 * @opcpuid clflushopt
7581 * @opgroup og_cachectl
7582 * @optest op1=1 ->
7583 */
7584FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7585{
7586 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7587 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7588 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7589
7590 IEM_MC_BEGIN(2, 0);
7591 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7592 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7595 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7596 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7597 IEM_MC_END();
7598 return VINF_SUCCESS;
7599}
7600
7601
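/*
 * Note that clflush and clflushopt funnel into the same CIMPL worker above;
 * architecturally they differ only in memory ordering (clflushopt is more
 * weakly ordered), which presumably has no bearing on how IEM emulates the
 * line flush itself.
 */

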
7602/** Opcode 0x0f 0xae 11b/5. */
7603FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7604{
7605 RT_NOREF_PV(bRm);
7606 IEMOP_MNEMONIC(lfence, "lfence");
7607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7608 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7609 return IEMOP_RAISE_INVALID_OPCODE();
7610
7611 IEM_MC_BEGIN(0, 0);
7612#ifndef RT_ARCH_ARM64
7613 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7614#endif
7615 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7616#ifndef RT_ARCH_ARM64
7617 else
7618 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7619#endif
7620 IEM_MC_ADVANCE_RIP();
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623}
7624
7625
7626/** Opcode 0x0f 0xae 11b/6. */
7627FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7628{
7629 RT_NOREF_PV(bRm);
7630 IEMOP_MNEMONIC(mfence, "mfence");
7631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7632 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7633 return IEMOP_RAISE_INVALID_OPCODE();
7634
7635 IEM_MC_BEGIN(0, 0);
7636#ifndef RT_ARCH_ARM64
7637 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7638#endif
7639 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7640#ifndef RT_ARCH_ARM64
7641 else
7642 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7643#endif
7644 IEM_MC_ADVANCE_RIP();
7645 IEM_MC_END();
7646 return VINF_SUCCESS;
7647}
7648
7649
7650/** Opcode 0x0f 0xae 11b/7. */
7651FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7652{
7653 RT_NOREF_PV(bRm);
7654 IEMOP_MNEMONIC(sfence, "sfence");
7655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7656 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7657 return IEMOP_RAISE_INVALID_OPCODE();
7658
7659 IEM_MC_BEGIN(0, 0);
7660#ifndef RT_ARCH_ARM64
7661 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7662#endif
7663 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7664#ifndef RT_ARCH_ARM64
7665 else
7666 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7667#endif
7668 IEM_MC_ADVANCE_RIP();
7669 IEM_MC_END();
7670 return VINF_SUCCESS;
7671}
7672
7673
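/*
 * Note on the RT_ARCH_ARM64 conditionals in the three fence handlers above:
 * iemAImpl_[lms]fence are presumably implemented with the corresponding host
 * instructions, so x86/amd64 hosts without SSE2 take the
 * iemAImpl_alt_mem_fence fallback (e.g. a locked memory operation), while
 * the ARM64 build calls the primary workers unconditionally and implements
 * them with its own barriers.
 */

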
7674/** Opcode 0xf3 0x0f 0xae 11b/0. */
7675FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7676{
7677 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7680 {
7681 IEM_MC_BEGIN(1, 0);
7682 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7683 IEM_MC_ARG(uint64_t, u64Dst, 0);
7684 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7685 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7686 IEM_MC_ADVANCE_RIP();
7687 IEM_MC_END();
7688 }
7689 else
7690 {
7691 IEM_MC_BEGIN(1, 0);
7692 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7693 IEM_MC_ARG(uint32_t, u32Dst, 0);
7694 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7695 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7696 IEM_MC_ADVANCE_RIP();
7697 IEM_MC_END();
7698 }
7699 return VINF_SUCCESS;
7700}
7701
7702
7703/** Opcode 0xf3 0x0f 0xae 11b/1. */
7704FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7705{
7706 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7708 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7709 {
7710 IEM_MC_BEGIN(1, 0);
7711 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7712 IEM_MC_ARG(uint64_t, u64Dst, 0);
7713 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7714 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7715 IEM_MC_ADVANCE_RIP();
7716 IEM_MC_END();
7717 }
7718 else
7719 {
7720 IEM_MC_BEGIN(1, 0);
7721 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7722 IEM_MC_ARG(uint32_t, u32Dst, 0);
7723 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7724 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7725 IEM_MC_ADVANCE_RIP();
7726 IEM_MC_END();
7727 }
7728 return VINF_SUCCESS;
7729}
7730
7731
7732/** Opcode 0xf3 0x0f 0xae 11b/2. */
7733FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7734{
7735 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7738 {
7739 IEM_MC_BEGIN(1, 0);
7740 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7741 IEM_MC_ARG(uint64_t, u64Dst, 0);
7742 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7743 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7744 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7745 IEM_MC_ADVANCE_RIP();
7746 IEM_MC_END();
7747 }
7748 else
7749 {
7750 IEM_MC_BEGIN(1, 0);
7751 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7752 IEM_MC_ARG(uint32_t, u32Dst, 0);
7753 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7754 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7755 IEM_MC_ADVANCE_RIP();
7756 IEM_MC_END();
7757 }
7758 return VINF_SUCCESS;
7759}
7760
7761
7762/** Opcode 0xf3 0x0f 0xae 11b/3. */
7763FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7764{
7765 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7767 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7768 {
7769 IEM_MC_BEGIN(1, 0);
7770 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7771 IEM_MC_ARG(uint64_t, u64Dst, 0);
7772 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7773 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7774 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7775 IEM_MC_ADVANCE_RIP();
7776 IEM_MC_END();
7777 }
7778 else
7779 {
7780 IEM_MC_BEGIN(1, 0);
7781 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7782 IEM_MC_ARG(uint32_t, u32Dst, 0);
7783 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7784 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7785 IEM_MC_ADVANCE_RIP();
7786 IEM_MC_END();
7787 }
7788 return VINF_SUCCESS;
7789}
7790
7791
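/*
 * Illustrative expansion of the IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT check used
 * by the four instructions above (an assumption; see the IEM_MC_* macro
 * definitions for the real test):
 *
 *      if (   pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT
 *          || !(pVCpu->cpum.GstCtx.cr4 & X86_CR4_FSGSBASE))
 *          return iemRaiseUndefinedOpcode(pVCpu);
 *
 * I.e. rdfsbase and friends are 64-bit only and gated on CR4.FSGSBASE.
 */

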
7792/**
7793 * Group 15 jump table for register variant.
7794 */
7795IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7796{ /* pfx: none, 066h, 0f3h, 0f2h */
7797 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7798 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7799 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7800 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7801 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7802 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7803 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7804 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7805};
7806AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7807
7808
7809/**
7810 * Group 15 jump table for memory variant.
7811 */
7812IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7813{ /* pfx: none, 066h, 0f3h, 0f2h */
7814 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7815 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7816 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7817 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7818 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7819 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7820 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7821 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7822};
7823AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7824
7825
7826/** Opcode 0x0f 0xae. */
7827FNIEMOP_DEF(iemOp_Grp15)
7828{
7829    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
7830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7831 if (IEM_IS_MODRM_REG_MODE(bRm))
7832 /* register, register */
7833 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7834 + pVCpu->iem.s.idxPrefix], bRm);
7835 /* memory, register */
7836 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7837 + pVCpu->iem.s.idxPrefix], bRm);
7838}
7839
7840
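/*
 * Worked example of the table dispatch above: f3 0f ae with modrm=0c0h
 * (mod=3, reg=0) is the register form, idxPrefix is 2 for the f3 prefix
 * (column order: none, 066h, 0f3h, 0f2h), so the call resolves to
 * g_apfnGroup15RegReg[0 * 4 + 2] = iemOp_Grp15_rdfsbase.
 */

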
7841/** Opcode 0x0f 0xaf. */
7842FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7843{
7844 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7845 IEMOP_HLP_MIN_386();
7846 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7847 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
7848}
7849
7850
7851/** Opcode 0x0f 0xb0. */
7852FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7853{
7854 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7855 IEMOP_HLP_MIN_486();
7856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7857
7858 if (IEM_IS_MODRM_REG_MODE(bRm))
7859 {
7860 IEMOP_HLP_DONE_DECODING();
7861 IEM_MC_BEGIN(4, 0);
7862 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7863 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7864 IEM_MC_ARG(uint8_t, u8Src, 2);
7865 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7866
7867 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7868 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7869 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7870 IEM_MC_REF_EFLAGS(pEFlags);
7871 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7872 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7873 else
7874 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7875
7876 IEM_MC_ADVANCE_RIP();
7877 IEM_MC_END();
7878 }
7879 else
7880 {
7881 IEM_MC_BEGIN(4, 3);
7882 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7883 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7884 IEM_MC_ARG(uint8_t, u8Src, 2);
7885 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7887 IEM_MC_LOCAL(uint8_t, u8Al);
7888
7889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7890 IEMOP_HLP_DONE_DECODING();
7891 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7892 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7893 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7894 IEM_MC_FETCH_EFLAGS(EFlags);
7895 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7896 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7897 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7898 else
7899 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7900
7901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7902 IEM_MC_COMMIT_EFLAGS(EFlags);
7903 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7904 IEM_MC_ADVANCE_RIP();
7905 IEM_MC_END();
7906 }
7907 return VINF_SUCCESS;
7908}
7909
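/*
 * Behavioural sketch of the cmpxchg workers (the real implementations live
 * in the IEMAllAImpl* files):
 *
 *      if (*puAl == *puDst) { EFLAGS.ZF = 1; *puDst = u8Src;  }
 *      else                 { EFLAGS.ZF = 0; *puAl  = *puDst; }
 *
 * with CF/OF/SF/AF/PF set as for a compare of the two operands. The memory
 * form above stores the accumulator back unconditionally, which is harmless
 * on success as the worker leaves the local copy unchanged in that case.
 */
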
7910/** Opcode 0x0f 0xb1. */
7911FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7912{
7913 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7914 IEMOP_HLP_MIN_486();
7915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7916
7917 if (IEM_IS_MODRM_REG_MODE(bRm))
7918 {
7919 IEMOP_HLP_DONE_DECODING();
7920 switch (pVCpu->iem.s.enmEffOpSize)
7921 {
7922 case IEMMODE_16BIT:
7923 IEM_MC_BEGIN(4, 0);
7924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7925 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7926 IEM_MC_ARG(uint16_t, u16Src, 2);
7927 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7928
7929 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7930 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7931 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7932 IEM_MC_REF_EFLAGS(pEFlags);
7933 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7934 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7935 else
7936 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7937
7938 IEM_MC_ADVANCE_RIP();
7939 IEM_MC_END();
7940 return VINF_SUCCESS;
7941
7942 case IEMMODE_32BIT:
7943 IEM_MC_BEGIN(4, 0);
7944 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7945 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7946 IEM_MC_ARG(uint32_t, u32Src, 2);
7947 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7948
7949 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7950 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7951 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7952 IEM_MC_REF_EFLAGS(pEFlags);
7953 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7954 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7955 else
7956 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7957
7958 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7959 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7960 IEM_MC_ADVANCE_RIP();
7961 IEM_MC_END();
7962 return VINF_SUCCESS;
7963
7964 case IEMMODE_64BIT:
7965 IEM_MC_BEGIN(4, 0);
7966 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7967 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7968#ifdef RT_ARCH_X86
7969 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7970#else
7971 IEM_MC_ARG(uint64_t, u64Src, 2);
7972#endif
7973 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7974
7975 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7976 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7977 IEM_MC_REF_EFLAGS(pEFlags);
7978#ifdef RT_ARCH_X86
7979 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7980 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7981 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7982 else
7983 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7984#else
7985 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7987 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7988 else
7989 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7990#endif
7991
7992 IEM_MC_ADVANCE_RIP();
7993 IEM_MC_END();
7994 return VINF_SUCCESS;
7995
7996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7997 }
7998 }
7999 else
8000 {
8001 switch (pVCpu->iem.s.enmEffOpSize)
8002 {
8003 case IEMMODE_16BIT:
8004 IEM_MC_BEGIN(4, 3);
8005 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8006 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8007 IEM_MC_ARG(uint16_t, u16Src, 2);
8008 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8010 IEM_MC_LOCAL(uint16_t, u16Ax);
8011
8012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8013 IEMOP_HLP_DONE_DECODING();
8014 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8015 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8016 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8017 IEM_MC_FETCH_EFLAGS(EFlags);
8018 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8019 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8021 else
8022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8023
8024 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8025 IEM_MC_COMMIT_EFLAGS(EFlags);
8026 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8027 IEM_MC_ADVANCE_RIP();
8028 IEM_MC_END();
8029 return VINF_SUCCESS;
8030
8031 case IEMMODE_32BIT:
8032 IEM_MC_BEGIN(4, 3);
8033 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8034 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8035 IEM_MC_ARG(uint32_t, u32Src, 2);
8036 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8038 IEM_MC_LOCAL(uint32_t, u32Eax);
8039
8040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8041 IEMOP_HLP_DONE_DECODING();
8042 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8043 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8044 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8045 IEM_MC_FETCH_EFLAGS(EFlags);
8046 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8047 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8048 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8049 else
8050 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8051
8052 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8053 IEM_MC_COMMIT_EFLAGS(EFlags);
8054 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8055 IEM_MC_ADVANCE_RIP();
8056 IEM_MC_END();
8057 return VINF_SUCCESS;
8058
8059 case IEMMODE_64BIT:
8060 IEM_MC_BEGIN(4, 3);
8061 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8062 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8063#ifdef RT_ARCH_X86
8064 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8065#else
8066 IEM_MC_ARG(uint64_t, u64Src, 2);
8067#endif
8068 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8070 IEM_MC_LOCAL(uint64_t, u64Rax);
8071
8072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8073 IEMOP_HLP_DONE_DECODING();
8074 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8075 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8076 IEM_MC_FETCH_EFLAGS(EFlags);
8077 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8078#ifdef RT_ARCH_X86
8079 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8080 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8081 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8082 else
8083 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8084#else
8085 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8086 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8088 else
8089 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8090#endif
8091
8092 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8093 IEM_MC_COMMIT_EFLAGS(EFlags);
8094 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8095 IEM_MC_ADVANCE_RIP();
8096 IEM_MC_END();
8097 return VINF_SUCCESS;
8098
8099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8100 }
8101 }
8102}
8103
8104
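/*
 * The RT_ARCH_X86 special case above passes the 64-bit source operand by
 * reference instead of by value; presumably this keeps the 32-bit assembly
 * helpers free of 64-bit stack arguments, every operand arriving as a
 * pointer instead.
 */

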
8105FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8106{
8107 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8108 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
8109
8110 switch (pVCpu->iem.s.enmEffOpSize)
8111 {
8112 case IEMMODE_16BIT:
8113 IEM_MC_BEGIN(5, 1);
8114 IEM_MC_ARG(uint16_t, uSel, 0);
8115 IEM_MC_ARG(uint16_t, offSeg, 1);
8116 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8117 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8118 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8119 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8123 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8124 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8125 IEM_MC_END();
8126 return VINF_SUCCESS;
8127
8128 case IEMMODE_32BIT:
8129 IEM_MC_BEGIN(5, 1);
8130 IEM_MC_ARG(uint16_t, uSel, 0);
8131 IEM_MC_ARG(uint32_t, offSeg, 1);
8132 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8133 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8134 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8135 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8138 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8139 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8140 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8141 IEM_MC_END();
8142 return VINF_SUCCESS;
8143
8144 case IEMMODE_64BIT:
8145 IEM_MC_BEGIN(5, 1);
8146 IEM_MC_ARG(uint16_t, uSel, 0);
8147 IEM_MC_ARG(uint64_t, offSeg, 1);
8148 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8149 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8150 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8151 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8154            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
8155 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8156 else
8157 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8158 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8159 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8160 IEM_MC_END();
8161 return VINF_SUCCESS;
8162
8163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8164 }
8165}
8166
8167
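/*
 * Memory layout consumed by the common worker above, e.g. for a 32-bit
 * 'lss eax, [mem]':
 *
 *      [mem+0]  dword   offset   -> eax (IEM_MC_FETCH_MEM_U32)
 *      [mem+4]  word    selector -> ss  (IEM_MC_FETCH_MEM_U16_DISP, disp 4)
 *
 * The 16-bit and 64-bit forms fetch the selector at displacement 2 and 8
 * respectively.
 */

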
8168/** Opcode 0x0f 0xb2. */
8169FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8170{
8171 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8172 IEMOP_HLP_MIN_386();
8173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8174 if (IEM_IS_MODRM_REG_MODE(bRm))
8175 return IEMOP_RAISE_INVALID_OPCODE();
8176 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8177}
8178
8179
8180/** Opcode 0x0f 0xb3. */
8181FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8182{
8183 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8184 IEMOP_HLP_MIN_386();
8185 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8186}
8187
8188
8189/** Opcode 0x0f 0xb4. */
8190FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8191{
8192 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8193 IEMOP_HLP_MIN_386();
8194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8195 if (IEM_IS_MODRM_REG_MODE(bRm))
8196 return IEMOP_RAISE_INVALID_OPCODE();
8197 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8198}
8199
8200
8201/** Opcode 0x0f 0xb5. */
8202FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8203{
8204 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8205 IEMOP_HLP_MIN_386();
8206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8207 if (IEM_IS_MODRM_REG_MODE(bRm))
8208 return IEMOP_RAISE_INVALID_OPCODE();
8209 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8210}
8211
8212
8213/** Opcode 0x0f 0xb6. */
8214FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8215{
8216 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8217 IEMOP_HLP_MIN_386();
8218
8219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8220
8221 /*
8222 * If rm is denoting a register, no more instruction bytes.
8223 */
8224 if (IEM_IS_MODRM_REG_MODE(bRm))
8225 {
8226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8227 switch (pVCpu->iem.s.enmEffOpSize)
8228 {
8229 case IEMMODE_16BIT:
8230 IEM_MC_BEGIN(0, 1);
8231 IEM_MC_LOCAL(uint16_t, u16Value);
8232 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8233 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8234 IEM_MC_ADVANCE_RIP();
8235 IEM_MC_END();
8236 return VINF_SUCCESS;
8237
8238 case IEMMODE_32BIT:
8239 IEM_MC_BEGIN(0, 1);
8240 IEM_MC_LOCAL(uint32_t, u32Value);
8241 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8242 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8243 IEM_MC_ADVANCE_RIP();
8244 IEM_MC_END();
8245 return VINF_SUCCESS;
8246
8247 case IEMMODE_64BIT:
8248 IEM_MC_BEGIN(0, 1);
8249 IEM_MC_LOCAL(uint64_t, u64Value);
8250 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8251 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8252 IEM_MC_ADVANCE_RIP();
8253 IEM_MC_END();
8254 return VINF_SUCCESS;
8255
8256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8257 }
8258 }
8259 else
8260 {
8261 /*
8262 * We're loading a register from memory.
8263 */
8264 switch (pVCpu->iem.s.enmEffOpSize)
8265 {
8266 case IEMMODE_16BIT:
8267 IEM_MC_BEGIN(0, 2);
8268 IEM_MC_LOCAL(uint16_t, u16Value);
8269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8272 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8273 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8274 IEM_MC_ADVANCE_RIP();
8275 IEM_MC_END();
8276 return VINF_SUCCESS;
8277
8278 case IEMMODE_32BIT:
8279 IEM_MC_BEGIN(0, 2);
8280 IEM_MC_LOCAL(uint32_t, u32Value);
8281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8284 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8285 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8286 IEM_MC_ADVANCE_RIP();
8287 IEM_MC_END();
8288 return VINF_SUCCESS;
8289
8290 case IEMMODE_64BIT:
8291 IEM_MC_BEGIN(0, 2);
8292 IEM_MC_LOCAL(uint64_t, u64Value);
8293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8296 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8297 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8298 IEM_MC_ADVANCE_RIP();
8299 IEM_MC_END();
8300 return VINF_SUCCESS;
8301
8302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8303 }
8304 }
8305}
8306
8307
8308/** Opcode 0x0f 0xb7. */
8309FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
8310{
8311 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
8312 IEMOP_HLP_MIN_386();
8313
8314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8315
8316 /** @todo Not entirely sure how the operand size prefix is handled here,
8317     *        assuming that it will be ignored. Would be nice to have a few
8318     *        tests for this. */
8319 /*
8320 * If rm is denoting a register, no more instruction bytes.
8321 */
8322 if (IEM_IS_MODRM_REG_MODE(bRm))
8323 {
8324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8325 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8326 {
8327 IEM_MC_BEGIN(0, 1);
8328 IEM_MC_LOCAL(uint32_t, u32Value);
8329 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8330 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8331 IEM_MC_ADVANCE_RIP();
8332 IEM_MC_END();
8333 }
8334 else
8335 {
8336 IEM_MC_BEGIN(0, 1);
8337 IEM_MC_LOCAL(uint64_t, u64Value);
8338 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8339 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8340 IEM_MC_ADVANCE_RIP();
8341 IEM_MC_END();
8342 }
8343 }
8344 else
8345 {
8346 /*
8347 * We're loading a register from memory.
8348 */
8349 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8350 {
8351 IEM_MC_BEGIN(0, 2);
8352 IEM_MC_LOCAL(uint32_t, u32Value);
8353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8356 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8357 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8358 IEM_MC_ADVANCE_RIP();
8359 IEM_MC_END();
8360 }
8361 else
8362 {
8363 IEM_MC_BEGIN(0, 2);
8364 IEM_MC_LOCAL(uint64_t, u64Value);
8365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8368 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8369 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8370 IEM_MC_ADVANCE_RIP();
8371 IEM_MC_END();
8372 }
8373 }
8374 return VINF_SUCCESS;
8375}
8376
8377
8378/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
8379FNIEMOP_UD_STUB(iemOp_jmpe);
8380
8381
8382/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
8383FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
8384{
8385 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8386 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
8387 return iemOp_InvalidNeedRM(pVCpu);
8388#ifndef TST_IEM_CHECK_MC
8389# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
8390 static const IEMOPBINSIZES s_Native =
8391 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
8392# endif
8393 static const IEMOPBINSIZES s_Fallback =
8394 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
8395#endif
8396 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
8397}
8398
8399
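/*
 * IEM_SELECT_HOST_OR_FALLBACK above picks s_Native when the host CPU itself
 * has POPCNT, so the assembly workers can use the real instruction, and the
 * C fallback otherwise. A hedged sketch of what such a fallback does for one
 * width (the real one being iemAImpl_popcnt_u32_fallback):
 *
 *      uint32_t cBits = 0;
 *      for (uint32_t uTmp = uSrc; uTmp != 0; uTmp &= uTmp - 1)
 *          cBits++;                // clears the lowest set bit per round
 *      *puDst = cBits;             // ZF = (uSrc == 0); OF/SF/AF/CF/PF = 0
 */

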
8400/**
8401 * @opcode 0xb9
8402 * @opinvalid intel-modrm
8403 * @optest ->
8404 */
8405FNIEMOP_DEF(iemOp_Grp10)
8406{
8407 /*
8408     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also decodes
8409     * the modr/m byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
8410 */
8411 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
8412 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
8413 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
8414}
8415
8416
8417/** Opcode 0x0f 0xba. */
8418FNIEMOP_DEF(iemOp_Grp8)
8419{
8420 IEMOP_HLP_MIN_386();
8421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8422 PCIEMOPBINSIZES pImpl;
8423 switch (IEM_GET_MODRM_REG_8(bRm))
8424 {
8425 case 0: case 1: case 2: case 3:
8426 /* Both AMD and Intel want full modr/m decoding and imm8. */
8427 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
8428 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
8429 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
8430 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
8431 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
8432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8433 }
8434 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8435
8436 if (IEM_IS_MODRM_REG_MODE(bRm))
8437 {
8438 /* register destination. */
8439 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8441
8442 switch (pVCpu->iem.s.enmEffOpSize)
8443 {
8444 case IEMMODE_16BIT:
8445 IEM_MC_BEGIN(3, 0);
8446 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8447 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
8448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8449
8450 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8451 IEM_MC_REF_EFLAGS(pEFlags);
8452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8453
8454 IEM_MC_ADVANCE_RIP();
8455 IEM_MC_END();
8456 return VINF_SUCCESS;
8457
8458 case IEMMODE_32BIT:
8459 IEM_MC_BEGIN(3, 0);
8460 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8461 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
8462 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8463
8464 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8465 IEM_MC_REF_EFLAGS(pEFlags);
8466 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8467
8468 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8469 IEM_MC_ADVANCE_RIP();
8470 IEM_MC_END();
8471 return VINF_SUCCESS;
8472
8473 case IEMMODE_64BIT:
8474 IEM_MC_BEGIN(3, 0);
8475 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8476 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8477 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8478
8479 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8480 IEM_MC_REF_EFLAGS(pEFlags);
8481 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8482
8483 IEM_MC_ADVANCE_RIP();
8484 IEM_MC_END();
8485 return VINF_SUCCESS;
8486
8487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8488 }
8489 }
8490 else
8491 {
8492 /* memory destination. */
8493
8494 uint32_t fAccess;
8495 if (pImpl->pfnLockedU16)
8496 fAccess = IEM_ACCESS_DATA_RW;
8497 else /* BT */
8498 fAccess = IEM_ACCESS_DATA_R;
8499
8500 /** @todo test negative bit offsets! */
8501 switch (pVCpu->iem.s.enmEffOpSize)
8502 {
8503 case IEMMODE_16BIT:
8504 IEM_MC_BEGIN(3, 1);
8505 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8506 IEM_MC_ARG(uint16_t, u16Src, 1);
8507 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8509
8510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8511 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8512 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
8513 if (pImpl->pfnLockedU16)
8514 IEMOP_HLP_DONE_DECODING();
8515 else
8516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8517 IEM_MC_FETCH_EFLAGS(EFlags);
8518 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8519 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8521 else
8522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8523 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8524
8525 IEM_MC_COMMIT_EFLAGS(EFlags);
8526 IEM_MC_ADVANCE_RIP();
8527 IEM_MC_END();
8528 return VINF_SUCCESS;
8529
8530 case IEMMODE_32BIT:
8531 IEM_MC_BEGIN(3, 1);
8532 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8533 IEM_MC_ARG(uint32_t, u32Src, 1);
8534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8536
8537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8538 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8539 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8540 if (pImpl->pfnLockedU16)
8541 IEMOP_HLP_DONE_DECODING();
8542 else
8543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8544 IEM_MC_FETCH_EFLAGS(EFlags);
8545 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8548 else
8549 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8551
8552 IEM_MC_COMMIT_EFLAGS(EFlags);
8553 IEM_MC_ADVANCE_RIP();
8554 IEM_MC_END();
8555 return VINF_SUCCESS;
8556
8557 case IEMMODE_64BIT:
8558 IEM_MC_BEGIN(3, 1);
8559 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8560 IEM_MC_ARG(uint64_t, u64Src, 1);
8561 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8563
8564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8565 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8566 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8567 if (pImpl->pfnLockedU16)
8568 IEMOP_HLP_DONE_DECODING();
8569 else
8570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8571 IEM_MC_FETCH_EFLAGS(EFlags);
8572 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8573 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8574 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8575 else
8576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8577 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8578
8579 IEM_MC_COMMIT_EFLAGS(EFlags);
8580 IEM_MC_ADVANCE_RIP();
8581 IEM_MC_END();
8582 return VINF_SUCCESS;
8583
8584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8585 }
8586 }
8587}
8588
8589
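/*
 * For the imm8 forms above the bit offset is simply masked to the operand
 * width (u8Bit & 0x0f/0x1f/0x3f), so unlike the bt Ev,Gv family no effective
 * address adjustment is needed. Worked example with a 16-bit operand size:
 *
 *      bt word [mem], 21       ; 21 & 0x0f = 5, tests bit 5 of the word
 *
 * CF receives the selected bit; bts, btr and btc additionally set, clear or
 * toggle it in the destination.
 */

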
8590/** Opcode 0x0f 0xbb. */
8591FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8592{
8593 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8594 IEMOP_HLP_MIN_386();
8595 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8596}
8597
8598
8599/**
8600 * Common worker for BSF and BSR instructions.
8601 *
8602 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8603 * the destination register (it is left unchanged when the source is zero),
8604 * which means that for 32-bit operations the high bits must be left alone.
8605 *
8606 * @param pImpl Pointer to the instruction implementation (assembly).
8607 */
8608FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8609{
8610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8611
8612 /*
8613 * If rm is denoting a register, no more instruction bytes.
8614 */
8615 if (IEM_IS_MODRM_REG_MODE(bRm))
8616 {
8617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8618 switch (pVCpu->iem.s.enmEffOpSize)
8619 {
8620 case IEMMODE_16BIT:
8621 IEM_MC_BEGIN(3, 0);
8622 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8623 IEM_MC_ARG(uint16_t, u16Src, 1);
8624 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8625
8626 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8627 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8628 IEM_MC_REF_EFLAGS(pEFlags);
8629 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8630
8631 IEM_MC_ADVANCE_RIP();
8632 IEM_MC_END();
8633 break;
8634
8635 case IEMMODE_32BIT:
8636 IEM_MC_BEGIN(3, 0);
8637 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8638 IEM_MC_ARG(uint32_t, u32Src, 1);
8639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8640
8641 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8642 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8643 IEM_MC_REF_EFLAGS(pEFlags);
8644 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8645 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8646 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8647 IEM_MC_ENDIF();
8648 IEM_MC_ADVANCE_RIP();
8649 IEM_MC_END();
8650 break;
8651
8652 case IEMMODE_64BIT:
8653 IEM_MC_BEGIN(3, 0);
8654 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8655 IEM_MC_ARG(uint64_t, u64Src, 1);
8656 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8657
8658 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8659 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8660 IEM_MC_REF_EFLAGS(pEFlags);
8661 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8662
8663 IEM_MC_ADVANCE_RIP();
8664 IEM_MC_END();
8665 break;
8666 }
8667 }
8668 else
8669 {
8670 /*
8671 * We're accessing memory.
8672 */
8673 switch (pVCpu->iem.s.enmEffOpSize)
8674 {
8675 case IEMMODE_16BIT:
8676 IEM_MC_BEGIN(3, 1);
8677 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8678 IEM_MC_ARG(uint16_t, u16Src, 1);
8679 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8681
8682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8684 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8685 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8686 IEM_MC_REF_EFLAGS(pEFlags);
8687 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8688
8689 IEM_MC_ADVANCE_RIP();
8690 IEM_MC_END();
8691 break;
8692
8693 case IEMMODE_32BIT:
8694 IEM_MC_BEGIN(3, 1);
8695 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8696 IEM_MC_ARG(uint32_t, u32Src, 1);
8697 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8699
8700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8702 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8703 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8704 IEM_MC_REF_EFLAGS(pEFlags);
8705 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8706
8707 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8708 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8709 IEM_MC_ENDIF();
8710 IEM_MC_ADVANCE_RIP();
8711 IEM_MC_END();
8712 break;
8713
8714 case IEMMODE_64BIT:
8715 IEM_MC_BEGIN(3, 1);
8716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8717 IEM_MC_ARG(uint64_t, u64Src, 1);
8718 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8720
8721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8723 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8724 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8725 IEM_MC_REF_EFLAGS(pEFlags);
8726 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8727
8728 IEM_MC_ADVANCE_RIP();
8729 IEM_MC_END();
8730 break;
8731 }
8732 }
8733 return VINF_SUCCESS;
8734}
8735
8736
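/*
 * Why the IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) checks in the 32-bit cases
 * above: the bsf/bsr workers set ZF and skip writing the destination when
 * the source is zero, so the implicit high-dword clearing of a 32-bit write
 * may only be done when a result was actually stored. E.g. with
 * rax=0xffffffffffffffff and ebx=0, 'bsf eax, ebx' leaves rax untouched
 * (AMD documents the destination as unchanged; Intel leaves it undefined).
 */

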
8737/** Opcode 0x0f 0xbc. */
8738FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
8739{
8740 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
8741 IEMOP_HLP_MIN_386();
8742 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8743 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
8744}
8745
8746
8747/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
8748FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
8749{
8750 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8751 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
8752 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8753
8754#ifndef TST_IEM_CHECK_MC
8755 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
8756 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
8757 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
8758 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
8759 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
8760 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
8761 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
8762 {
8763 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
8764 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
8765 };
8766#endif
8767 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8768 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8769 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8770}
8771
8772
8773/** Opcode 0x0f 0xbd. */
8774FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
8775{
8776 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8777 IEMOP_HLP_MIN_386();
8778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8779 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
8780}
8781
8782
8783/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8784FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
8785{
8786 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8787 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
8788 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8789
8790#ifndef TST_IEM_CHECK_MC
8791 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
8792 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
8793 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
8794 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
8795 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
8796 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
8797 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
8798 {
8799 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
8800 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
8801 };
8802#endif
8803 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8804 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8805 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8806}
8807
8808
8809
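/*
 * Note on the feature forwarding in tzcnt/lzcnt above: without the feature
 * the f3 prefix is simply ignored and the encodings execute as plain
 * bsf/bsr, which is exactly what the FNIEMOP_CALL fallbacks reproduce. With
 * the feature present the flag semantics differ from bsf/bsr: tzcnt/lzcnt
 * set CF when the source is zero (the result then being the operand width)
 * and ZF when the result is zero.
 */

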
8810/** Opcode 0x0f 0xbe. */
8811FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8812{
8813 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8814 IEMOP_HLP_MIN_386();
8815
8816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8817
8818 /*
8819 * If rm is denoting a register, no more instruction bytes.
8820 */
8821 if (IEM_IS_MODRM_REG_MODE(bRm))
8822 {
8823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8824 switch (pVCpu->iem.s.enmEffOpSize)
8825 {
8826 case IEMMODE_16BIT:
8827 IEM_MC_BEGIN(0, 1);
8828 IEM_MC_LOCAL(uint16_t, u16Value);
8829 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8830 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8831 IEM_MC_ADVANCE_RIP();
8832 IEM_MC_END();
8833 return VINF_SUCCESS;
8834
8835 case IEMMODE_32BIT:
8836 IEM_MC_BEGIN(0, 1);
8837 IEM_MC_LOCAL(uint32_t, u32Value);
8838 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8839 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8840 IEM_MC_ADVANCE_RIP();
8841 IEM_MC_END();
8842 return VINF_SUCCESS;
8843
8844 case IEMMODE_64BIT:
8845 IEM_MC_BEGIN(0, 1);
8846 IEM_MC_LOCAL(uint64_t, u64Value);
8847 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8848 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8849 IEM_MC_ADVANCE_RIP();
8850 IEM_MC_END();
8851 return VINF_SUCCESS;
8852
8853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8854 }
8855 }
8856 else
8857 {
8858 /*
8859 * We're loading a register from memory.
8860 */
8861 switch (pVCpu->iem.s.enmEffOpSize)
8862 {
8863 case IEMMODE_16BIT:
8864 IEM_MC_BEGIN(0, 2);
8865 IEM_MC_LOCAL(uint16_t, u16Value);
8866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8869 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8870 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8871 IEM_MC_ADVANCE_RIP();
8872 IEM_MC_END();
8873 return VINF_SUCCESS;
8874
8875 case IEMMODE_32BIT:
8876 IEM_MC_BEGIN(0, 2);
8877 IEM_MC_LOCAL(uint32_t, u32Value);
8878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8881 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8882 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8883 IEM_MC_ADVANCE_RIP();
8884 IEM_MC_END();
8885 return VINF_SUCCESS;
8886
8887 case IEMMODE_64BIT:
8888 IEM_MC_BEGIN(0, 2);
8889 IEM_MC_LOCAL(uint64_t, u64Value);
8890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8893 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8894 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8895 IEM_MC_ADVANCE_RIP();
8896 IEM_MC_END();
8897 return VINF_SUCCESS;
8898
8899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8900 }
8901 }
8902}
8903
8904
8905/** Opcode 0x0f 0xbf. */
8906FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8907{
8908 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8909 IEMOP_HLP_MIN_386();
8910
8911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8912
8913 /** @todo Not entirely sure how the operand size prefix is handled here,
8914     *        assuming that it will be ignored. Would be nice to have a few
8915     *        tests for this. */
8916 /*
8917 * If rm is denoting a register, no more instruction bytes.
8918 */
8919 if (IEM_IS_MODRM_REG_MODE(bRm))
8920 {
8921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8922 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8923 {
8924 IEM_MC_BEGIN(0, 1);
8925 IEM_MC_LOCAL(uint32_t, u32Value);
8926 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8927 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8928 IEM_MC_ADVANCE_RIP();
8929 IEM_MC_END();
8930 }
8931 else
8932 {
8933 IEM_MC_BEGIN(0, 1);
8934 IEM_MC_LOCAL(uint64_t, u64Value);
8935 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8936 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8937 IEM_MC_ADVANCE_RIP();
8938 IEM_MC_END();
8939 }
8940 }
8941 else
8942 {
8943 /*
8944 * We're loading a register from memory.
8945 */
8946 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8947 {
8948 IEM_MC_BEGIN(0, 2);
8949 IEM_MC_LOCAL(uint32_t, u32Value);
8950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8953 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8954 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8955 IEM_MC_ADVANCE_RIP();
8956 IEM_MC_END();
8957 }
8958 else
8959 {
8960 IEM_MC_BEGIN(0, 2);
8961 IEM_MC_LOCAL(uint64_t, u64Value);
8962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8965 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8966 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8967 IEM_MC_ADVANCE_RIP();
8968 IEM_MC_END();
8969 }
8970 }
8971 return VINF_SUCCESS;
8972}
8973
8974
8975/** Opcode 0x0f 0xc0. */
8976FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8977{
8978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8979 IEMOP_HLP_MIN_486();
8980 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8981
8982 /*
8983 * If rm is denoting a register, no more instruction bytes.
8984 */
8985 if (IEM_IS_MODRM_REG_MODE(bRm))
8986 {
8987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8988
8989 IEM_MC_BEGIN(3, 0);
8990 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8991 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8992 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8993
8994 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8995 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
8996 IEM_MC_REF_EFLAGS(pEFlags);
8997 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8998
8999 IEM_MC_ADVANCE_RIP();
9000 IEM_MC_END();
9001 }
9002 else
9003 {
9004 /*
9005 * We're accessing memory.
9006 */
9007 IEM_MC_BEGIN(3, 3);
9008 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9009 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9010 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9011 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9013
9014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9015 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9016 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9017 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9018 IEM_MC_FETCH_EFLAGS(EFlags);
9019 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9020 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9021 else
9022 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9023
9024 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9025 IEM_MC_COMMIT_EFLAGS(EFlags);
9026 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9027 IEM_MC_ADVANCE_RIP();
9028 IEM_MC_END();
9029 return VINF_SUCCESS;
9030 }
9031 return VINF_SUCCESS;
9032}
9033
9034
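/*
 * Behavioural sketch of the xadd workers (real code in the IEMAllAImpl*
 * files):
 *
 *      uTmp   = *puDst + *puReg;   // EFLAGS as for ADD
 *      *puReg = *puDst;            // old destination into the register
 *      *puDst = uTmp;
 *
 * Hence the u8RegCopy local above and its wider siblings below: in the
 * memory forms the register operand is updated through a copy that is only
 * written back once the memory operand has been committed.
 */

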
9035/** Opcode 0x0f 0xc1. */
9036FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
9037{
9038 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
9039 IEMOP_HLP_MIN_486();
9040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9041
9042 /*
9043 * If rm is denoting a register, no more instruction bytes.
9044 */
9045 if (IEM_IS_MODRM_REG_MODE(bRm))
9046 {
9047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9048
9049 switch (pVCpu->iem.s.enmEffOpSize)
9050 {
9051 case IEMMODE_16BIT:
9052 IEM_MC_BEGIN(3, 0);
9053 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9054 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9055 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9056
9057 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9058 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9059 IEM_MC_REF_EFLAGS(pEFlags);
9060 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9061
9062 IEM_MC_ADVANCE_RIP();
9063 IEM_MC_END();
9064 return VINF_SUCCESS;
9065
9066 case IEMMODE_32BIT:
9067 IEM_MC_BEGIN(3, 0);
9068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9069 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9070 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9071
9072 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9073 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9074 IEM_MC_REF_EFLAGS(pEFlags);
9075 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9076
9077 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9078 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9079 IEM_MC_ADVANCE_RIP();
9080 IEM_MC_END();
9081 return VINF_SUCCESS;
9082
9083 case IEMMODE_64BIT:
9084 IEM_MC_BEGIN(3, 0);
9085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9086 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9088
9089 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9090 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9091 IEM_MC_REF_EFLAGS(pEFlags);
9092 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9093
9094 IEM_MC_ADVANCE_RIP();
9095 IEM_MC_END();
9096 return VINF_SUCCESS;
9097
9098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9099 }
9100 }
9101 else
9102 {
9103 /*
9104 * We're accessing memory.
9105 */
9106 switch (pVCpu->iem.s.enmEffOpSize)
9107 {
9108 case IEMMODE_16BIT:
9109 IEM_MC_BEGIN(3, 3);
9110 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9111 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9112 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9113 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9115
9116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9117 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9118 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9119 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9120 IEM_MC_FETCH_EFLAGS(EFlags);
9121 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9122 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9123 else
9124 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9125
9126 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9127 IEM_MC_COMMIT_EFLAGS(EFlags);
9128 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9129 IEM_MC_ADVANCE_RIP();
9130 IEM_MC_END();
9131 return VINF_SUCCESS;
9132
9133 case IEMMODE_32BIT:
9134 IEM_MC_BEGIN(3, 3);
9135 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9136 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9137 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9138 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9140
9141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9142 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9143 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9144 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9145 IEM_MC_FETCH_EFLAGS(EFlags);
9146 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9147 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9148 else
9149 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9150
9151 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9152 IEM_MC_COMMIT_EFLAGS(EFlags);
9153 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9154 IEM_MC_ADVANCE_RIP();
9155 IEM_MC_END();
9156 return VINF_SUCCESS;
9157
9158 case IEMMODE_64BIT:
9159 IEM_MC_BEGIN(3, 3);
9160 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9161 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9162 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9163 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9165
9166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9167 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9168 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9169 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9170 IEM_MC_FETCH_EFLAGS(EFlags);
9171 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9172 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9173 else
9174 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9175
9176 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9177 IEM_MC_COMMIT_EFLAGS(EFlags);
9178 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9179 IEM_MC_ADVANCE_RIP();
9180 IEM_MC_END();
9181 return VINF_SUCCESS;
9182
9183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9184 }
9185 }
9186}
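/*
 * Informal sketch of the XADD operation (the authoritative logic lives in
 * the iemAImpl_xadd_u8/u16/u32/u64 workers):
 *
 *      uTmp   = *puDst + *puReg;   // EFLAGS updated as for ADD
 *      *puReg = *puDst;            // source register receives the old destination
 *      *puDst = uTmp;              // destination receives the sum
 *
 * For the memory forms the register write-back goes through a local copy
 * (u8RegCopy & friends) and is only done after the memory operand has been
 * committed, so a fault on the commit leaves the guest register untouched.
 */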
9187
9188
9189/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9190FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9191/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9192FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9193/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9194FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9195/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9196FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9197
9198
9199/** Opcode 0x0f 0xc3. */
9200FNIEMOP_DEF(iemOp_movnti_My_Gy)
9201{
9202 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9203
9204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9205
9206 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9207 if (IEM_IS_MODRM_MEM_MODE(bRm))
9208 {
9209 switch (pVCpu->iem.s.enmEffOpSize)
9210 {
9211 case IEMMODE_32BIT:
9212 IEM_MC_BEGIN(0, 2);
9213 IEM_MC_LOCAL(uint32_t, u32Value);
9214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9215
9216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9218 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9219 return IEMOP_RAISE_INVALID_OPCODE();
9220
9221 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9222 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9223 IEM_MC_ADVANCE_RIP();
9224 IEM_MC_END();
9225 break;
9226
9227 case IEMMODE_64BIT:
9228 IEM_MC_BEGIN(0, 2);
9229 IEM_MC_LOCAL(uint64_t, u64Value);
9230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9231
9232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9234 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9235 return IEMOP_RAISE_INVALID_OPCODE();
9236
9237 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9238 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9239 IEM_MC_ADVANCE_RIP();
9240 IEM_MC_END();
9241 break;
9242
9243 case IEMMODE_16BIT:
9244 /** @todo check this form. */
9245 return IEMOP_RAISE_INVALID_OPCODE();
9246 }
9247 }
9248 else
9249 return IEMOP_RAISE_INVALID_OPCODE();
9250 return VINF_SUCCESS;
9251}
9252/* Opcode 0x66 0x0f 0xc3 - invalid */
9253/* Opcode 0xf3 0x0f 0xc3 - invalid */
9254/* Opcode 0xf2 0x0f 0xc3 - invalid */
9255
9256/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9257FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9258/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9259FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9260/* Opcode 0xf3 0x0f 0xc4 - invalid */
9261/* Opcode 0xf2 0x0f 0xc4 - invalid */
9262
9263/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9264FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9265/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9266FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9267/* Opcode 0xf3 0x0f 0xc5 - invalid */
9268/* Opcode 0xf2 0x0f 0xc5 - invalid */
9269
9270/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9271FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9272/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9273FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9274/* Opcode 0xf3 0x0f 0xc6 - invalid */
9275/* Opcode 0xf2 0x0f 0xc6 - invalid */
9276
9277
9278/** Opcode 0x0f 0xc7 !11/1. */
9279FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9280{
9281 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
9282
9283 IEM_MC_BEGIN(4, 3);
9284 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9285 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9286 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9287 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9288 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9289 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9291
9292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9293 IEMOP_HLP_DONE_DECODING();
9294 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9295
9296 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9297 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9298 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9299
9300 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9301 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9302 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9303
9304 IEM_MC_FETCH_EFLAGS(EFlags);
9305 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9306 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9307 else
9308 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9309
9310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9311 IEM_MC_COMMIT_EFLAGS(EFlags);
9312 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9313 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9314 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9315 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9316 IEM_MC_ENDIF();
9317 IEM_MC_ADVANCE_RIP();
9318
9319 IEM_MC_END();
9320 return VINF_SUCCESS;
9321}
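/*
 * Informal sketch of what the cmpxchg8b workers do (see iemAImpl_cmpxchg8b
 * and its locked variant for the authoritative logic):
 *
 *      if (*pu64MemDst == EDX:EAX)
 *      {
 *          *pu64MemDst = ECX:EBX;
 *          EFLAGS.ZF   = 1;
 *      }
 *      else
 *      {
 *          EDX:EAX     = *pu64MemDst;  // hence the stores above when ZF is clear
 *          EFLAGS.ZF   = 0;
 *      }
 */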
9322
9323
9324/** Opcode REX.W 0x0f 0xc7 !11/1. */
9325FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9326{
9327 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
9328 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9329 {
9330#if 0
9331 RT_NOREF(bRm);
9332 IEMOP_BITCH_ABOUT_STUB();
9333 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9334#else
9335 IEM_MC_BEGIN(4, 3);
9336 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9337 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9338 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9339 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9340 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9341 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9343
9344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9345 IEMOP_HLP_DONE_DECODING();
9346 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9347 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9348
9349 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9350 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9351 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9352
9353 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9354 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9355 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9356
9357 IEM_MC_FETCH_EFLAGS(EFlags);
9358# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9359# if defined(RT_ARCH_AMD64)
9360 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9361# endif
9362 {
9363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9365 else
9366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9367 }
9368# if defined(RT_ARCH_AMD64)
9369 else
9370# endif
9371# endif
9372# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9373 {
9374 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
9375 accesses that are not all atomic, which works fine in a UNI CPU guest
9376 configuration (ignoring DMA). If guest SMP is active we have no choice
9377 but to use a rendezvous callback here. Sigh. */
9378 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9379 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9380 else
9381 {
9382 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9383 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9384 }
9385 }
9386# endif
9387
9388 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9389 IEM_MC_COMMIT_EFLAGS(EFlags);
9390 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9391 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9392 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9393 IEM_MC_ENDIF();
9394 IEM_MC_ADVANCE_RIP();
9395
9396 IEM_MC_END();
9397 return VINF_SUCCESS;
9398#endif
9399 }
9400 Log(("cmpxchg16b -> #UD\n"));
9401 return IEMOP_RAISE_INVALID_OPCODE();
9402}
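/*
 * Note that cmpxchg16b differs from cmpxchg8b in two respects: the memory
 * operand must be 16 byte aligned (#GP(0) otherwise, enforced by the
 * IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED invocation above), and the
 * compare/exchange is done on RDX:RAX / RCX:RBX rather than EDX:EAX / ECX:EBX.
 */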
9403
9404FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9405{
9406 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9407 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9408 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9409}
9410
9411/** Opcode 0x0f 0xc7 11/6. */
9412FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9413
9414/** Opcode 0x0f 0xc7 !11/6. */
9415#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9416FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9417{
9418 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9419 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9420 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9421 IEM_MC_BEGIN(2, 0);
9422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9423 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9425 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9426 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9427 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9428 IEM_MC_END();
9429 return VINF_SUCCESS;
9430}
9431#else
9432FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9433#endif
9434
9435/** Opcode 0x66 0x0f 0xc7 !11/6. */
9436#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9437FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9438{
9439 IEMOP_MNEMONIC(vmclear, "vmclear");
9440 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9441 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9442 IEM_MC_BEGIN(2, 0);
9443 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9444 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9446 IEMOP_HLP_DONE_DECODING();
9447 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9448 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9449 IEM_MC_END();
9450 return VINF_SUCCESS;
9451}
9452#else
9453FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9454#endif
9455
9456/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9457#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9458FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9459{
9460 IEMOP_MNEMONIC(vmxon, "vmxon");
9461 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9462 IEM_MC_BEGIN(2, 0);
9463 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9464 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9466 IEMOP_HLP_DONE_DECODING();
9467 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9468 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9469 IEM_MC_END();
9470 return VINF_SUCCESS;
9471}
9472#else
9473FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9474#endif
9475
9476/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9477#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9478FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9479{
9480 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9481 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9482 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9483 IEM_MC_BEGIN(2, 0);
9484 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9485 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9487 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9488 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9489 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9490 IEM_MC_END();
9491 return VINF_SUCCESS;
9492}
9493#else
9494FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9495#endif
9496
9497/** Opcode 0x0f 0xc7 11/7. */
9498FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9499
9500
9501/**
9502 * Group 9 jump table for register variant.
9503 */
9504IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9505{ /* pfx: none, 066h, 0f3h, 0f2h */
9506 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9507 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9508 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9509 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9510 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9511 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9512 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9513 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9514};
9515AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9516
9517
9518/**
9519 * Group 9 jump table for memory variant.
9520 */
9521IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9522{ /* pfx: none, 066h, 0f3h, 0f2h */
9523 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9524 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9525 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9526 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9527 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9528 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9529 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9530 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9531};
9532AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9533
9534
9535/** Opcode 0x0f 0xc7. */
9536FNIEMOP_DEF(iemOp_Grp9)
9537{
9538 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9539 if (IEM_IS_MODRM_REG_MODE(bRm))
9540 /* register, register */
9541 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9542 + pVCpu->iem.s.idxPrefix], bRm);
9543 /* memory, register */
9544 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9545 + pVCpu->iem.s.idxPrefix], bRm);
9546}
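/*
 * Indexing example: 66 0f c7 /6 (vmclear) decodes with idxPrefix = 1 (the
 * operand size prefix column) and ModRM.reg = 6, so the memory form above
 * ends up calling g_apfnGroup9MemReg[6 * 4 + 1].
 */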
9547
9548
9549/**
9550 * Common 'bswap register' helper.
9551 */
9552FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9553{
9554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9555 switch (pVCpu->iem.s.enmEffOpSize)
9556 {
9557 case IEMMODE_16BIT:
9558 IEM_MC_BEGIN(1, 0);
9559 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9560 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9561 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9562 IEM_MC_ADVANCE_RIP();
9563 IEM_MC_END();
9564 return VINF_SUCCESS;
9565
9566 case IEMMODE_32BIT:
9567 IEM_MC_BEGIN(1, 0);
9568 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9569 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9570 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9571 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9572 IEM_MC_ADVANCE_RIP();
9573 IEM_MC_END();
9574 return VINF_SUCCESS;
9575
9576 case IEMMODE_64BIT:
9577 IEM_MC_BEGIN(1, 0);
9578 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9579 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9580 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9581 IEM_MC_ADVANCE_RIP();
9582 IEM_MC_END();
9583 return VINF_SUCCESS;
9584
9585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9586 }
9587}
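/*
 * Informal sketch of the 32-bit swap done by the iemAImpl_bswap_u32 worker:
 *
 *      uDst =  (uDst << 24)
 *           | ((uDst <<  8) & UINT32_C(0x00ff0000))
 *           | ((uDst >>  8) & UINT32_C(0x0000ff00))
 *           |  (uDst >> 24);
 *
 * The 16-bit form is architecturally undefined, which is why the worker is
 * handed a 32-bit register reference without clearing the high dword.
 */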
9588
9589
9590/** Opcode 0x0f 0xc8. */
9591FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9592{
9593 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9594 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9595 prefix, but it appears REX.B is the correct prefix. For a parallel
9596 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9597 IEMOP_HLP_MIN_486();
9598 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9599}
9600
9601
9602/** Opcode 0x0f 0xc9. */
9603FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9604{
9605 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9606 IEMOP_HLP_MIN_486();
9607 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9608}
9609
9610
9611/** Opcode 0x0f 0xca. */
9612FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9613{
9614 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9615 IEMOP_HLP_MIN_486();
9616 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9617}
9618
9619
9620/** Opcode 0x0f 0xcb. */
9621FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9622{
9623 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9624 IEMOP_HLP_MIN_486();
9625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9626}
9627
9628
9629/** Opcode 0x0f 0xcc. */
9630FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9631{
9632 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9633 IEMOP_HLP_MIN_486();
9634 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9635}
9636
9637
9638/** Opcode 0x0f 0xcd. */
9639FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9640{
9641 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9642 IEMOP_HLP_MIN_486();
9643 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9644}
9645
9646
9647/** Opcode 0x0f 0xce. */
9648FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9649{
9650 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9651 IEMOP_HLP_MIN_486();
9652 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9653}
9654
9655
9656/** Opcode 0x0f 0xcf. */
9657FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9658{
9659 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9660 IEMOP_HLP_MIN_486();
9661 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9662}
9663
9664
9665/* Opcode 0x0f 0xd0 - invalid */
9666/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9667FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9668/* Opcode 0xf3 0x0f 0xd0 - invalid */
9669/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9670FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9671
9672/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9673FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
9674{
9675 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9676 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
9677}
9678
9679/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
9680FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
9681{
9682 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9683 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
9684}
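/*
 * For the whole psrlw/psrld/psrlq family the shift count is taken from the
 * low quadword of the source operand and applied to every element of the
 * destination; counts larger than the element width zero the destination
 * instead of wrapping. Informally, for one 16-bit psrlw lane:
 *
 *      uLane = cShift <= 15 ? uLane >> cShift : 0;
 */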
9685
9686/* Opcode 0xf3 0x0f 0xd1 - invalid */
9687/* Opcode 0xf2 0x0f 0xd1 - invalid */
9688
9689/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9690FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
9691{
9692 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9693 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
9694}
9695
9696
9697/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9698FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
9699{
9700 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9701 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
9702}
9703
9704
9705/* Opcode 0xf3 0x0f 0xd2 - invalid */
9706/* Opcode 0xf2 0x0f 0xd2 - invalid */
9707
9708/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9709FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
9710{
9711 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9712 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
9713}
9714
9715
9716/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9717FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
9718{
9719 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9720 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
9721}
9722
9723
9724/* Opcode 0xf3 0x0f 0xd3 - invalid */
9725/* Opcode 0xf2 0x0f 0xd3 - invalid */
9726
9727
9728/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9729FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
9730{
9731 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9732 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
9733}
9734
9735
9736/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
9737FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
9738{
9739 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9740 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
9741}
9742
9743
9744/* Opcode 0xf3 0x0f 0xd4 - invalid */
9745/* Opcode 0xf2 0x0f 0xd4 - invalid */
9746
9747/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9748FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
9749{
9750 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9751 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
9752}
9753
9754/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9755FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
9756{
9757 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9758 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
9759}
9760
9761
9762/* Opcode 0xf3 0x0f 0xd5 - invalid */
9763/* Opcode 0xf2 0x0f 0xd5 - invalid */
9764
9765/* Opcode 0x0f 0xd6 - invalid */
9766
9767/**
9768 * @opcode 0xd6
9769 * @oppfx 0x66
9770 * @opcpuid sse2
9771 * @opgroup og_sse2_pcksclr_datamove
9772 * @opxcpttype none
9773 * @optest op1=-1 op2=2 -> op1=2
9774 * @optest op1=0 op2=-42 -> op1=-42
9775 */
9776FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9777{
9778 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9780 if (IEM_IS_MODRM_REG_MODE(bRm))
9781 {
9782 /*
9783 * Register, register.
9784 */
9785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9786 IEM_MC_BEGIN(0, 2);
9787 IEM_MC_LOCAL(uint64_t, uSrc);
9788
9789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9790 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9791
9792 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9793 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
9794
9795 IEM_MC_ADVANCE_RIP();
9796 IEM_MC_END();
9797 }
9798 else
9799 {
9800 /*
9801 * Memory, register.
9802 */
9803 IEM_MC_BEGIN(0, 2);
9804 IEM_MC_LOCAL(uint64_t, uSrc);
9805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9806
9807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9811
9812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9814
9815 IEM_MC_ADVANCE_RIP();
9816 IEM_MC_END();
9817 }
9818 return VINF_SUCCESS;
9819}
9820
9821
9822/**
9823 * @opcode 0xd6
9824 * @opcodesub 11 mr/reg
9825 * @oppfx f3
9826 * @opcpuid sse2
9827 * @opgroup og_sse2_simdint_datamove
9828 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9829 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9830 */
9831FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9832{
9833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9834 if (IEM_IS_MODRM_REG_MODE(bRm))
9835 {
9836 /*
9837 * Register, register.
9838 */
9839 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9841 IEM_MC_BEGIN(0, 1);
9842 IEM_MC_LOCAL(uint64_t, uSrc);
9843
9844 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9845 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9846
9847 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
9848 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
9849 IEM_MC_FPU_TO_MMX_MODE();
9850
9851 IEM_MC_ADVANCE_RIP();
9852 IEM_MC_END();
9853 return VINF_SUCCESS;
9854 }
9855
9856 /**
9857 * @opdone
9858 * @opmnemonic udf30fd6mem
9859 * @opcode 0xd6
9860 * @opcodesub !11 mr/reg
9861 * @oppfx f3
9862 * @opunused intel-modrm
9863 * @opcpuid sse
9864 * @optest ->
9865 */
9866 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9867}
9868
9869
9870/**
9871 * @opcode 0xd6
9872 * @opcodesub 11 mr/reg
9873 * @oppfx f2
9874 * @opcpuid sse2
9875 * @opgroup og_sse2_simdint_datamove
9876 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9877 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9878 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9879 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9880 * @optest op1=-42 op2=0xfedcba9876543210
9881 * -> op1=0xfedcba9876543210 ftw=0xff
9882 */
9883FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9884{
9885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9886 if (IEM_IS_MODRM_REG_MODE(bRm))
9887 {
9888 /*
9889 * Register, register.
9890 */
9891 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9893 IEM_MC_BEGIN(0, 1);
9894 IEM_MC_LOCAL(uint64_t, uSrc);
9895
9896 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9897 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9898
9899 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9900 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
9901 IEM_MC_FPU_TO_MMX_MODE();
9902
9903 IEM_MC_ADVANCE_RIP();
9904 IEM_MC_END();
9905 return VINF_SUCCESS;
9906 }
9907
9908 /**
9909 * @opdone
9910 * @opmnemonic udf20fd6mem
9911 * @opcode 0xd6
9912 * @opcodesub !11 mr/reg
9913 * @oppfx f2
9914 * @opunused intel-modrm
9915 * @opcpuid sse
9916 * @optest ->
9917 */
9918 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9919}
9920
9921
9922/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9923FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9924{
9925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9926 /* Docs say register only. */
9927 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9928 {
9929 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9930 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
9931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9932 IEM_MC_BEGIN(2, 0);
9933 IEM_MC_ARG(uint64_t *, puDst, 0);
9934 IEM_MC_ARG(uint64_t const *, puSrc, 1);
9935 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9936 IEM_MC_PREPARE_FPU_USAGE();
9937 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
9938 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
9939 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
9940 IEM_MC_FPU_TO_MMX_MODE();
9941 IEM_MC_ADVANCE_RIP();
9942 IEM_MC_END();
9943 return VINF_SUCCESS;
9944 }
9945 return IEMOP_RAISE_INVALID_OPCODE();
9946}
9947
9948
9949/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9950FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9951{
9952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9953 /* Docs say register only. */
9954 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9955 {
9956 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
9957 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
9958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9959 IEM_MC_BEGIN(2, 0);
9960 IEM_MC_ARG(uint64_t *, puDst, 0);
9961 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
9962 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9963 IEM_MC_PREPARE_SSE_USAGE();
9964 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
9965 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9966 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
9967 IEM_MC_ADVANCE_RIP();
9968 IEM_MC_END();
9969 return VINF_SUCCESS;
9970 }
9971 return IEMOP_RAISE_INVALID_OPCODE();
9972}
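/*
 * Informal sketch of what the pmovmskb workers compute: the most significant
 * bit of each source byte (8 bytes for MMX, 16 for SSE) is gathered into the
 * low bits of the destination GPR and all remaining bits are cleared:
 *
 *      uDst = 0;
 *      for (unsigned iByte = 0; iByte < cbSrc; iByte++)
 *          uDst |= (uint64_t)(pbSrc[iByte] >> 7) << iByte;
 */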
9973
9974
9975/* Opcode 0xf3 0x0f 0xd7 - invalid */
9976/* Opcode 0xf2 0x0f 0xd7 - invalid */
9977
9978
9979/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9980FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
9981{
9982 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9983 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
9984}
9985
9986
9987/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
9988FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
9989{
9990 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9991 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
9992}
9993
9994
9995/* Opcode 0xf3 0x0f 0xd8 - invalid */
9996/* Opcode 0xf2 0x0f 0xd8 - invalid */
9997
9998/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9999FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
10000{
10001 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10002 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
10003}
10004
10005
10006/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
10007FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
10008{
10009 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10010 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
10011}
10012
10013
10014/* Opcode 0xf3 0x0f 0xd9 - invalid */
10015/* Opcode 0xf2 0x0f 0xd9 - invalid */
10016
10017/** Opcode 0x0f 0xda - pminub Pq, Qq */
10018FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
10019{
10020 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10021 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
10022}
10023
10024
10025/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
10026FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
10027{
10028 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10029 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
10030}
10031
10032/* Opcode 0xf3 0x0f 0xda - invalid */
10033/* Opcode 0xf2 0x0f 0xda - invalid */
10034
10035/** Opcode 0x0f 0xdb - pand Pq, Qq */
10036FNIEMOP_DEF(iemOp_pand_Pq_Qq)
10037{
10038 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10039 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
10040}
10041
10042
10043/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
10044FNIEMOP_DEF(iemOp_pand_Vx_Wx)
10045{
10046 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10047 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
10048}
10049
10050
10051/* Opcode 0xf3 0x0f 0xdb - invalid */
10052/* Opcode 0xf2 0x0f 0xdb - invalid */
10053
10054/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
10055FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
10056{
10057 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10058 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
10059}
10060
10061
10062/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10063FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10064{
10065 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10066 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10067}
10068
10069
10070/* Opcode 0xf3 0x0f 0xdc - invalid */
10071/* Opcode 0xf2 0x0f 0xdc - invalid */
10072
10073/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10074FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10075{
10076 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10077 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10078}
10079
10080
10081/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
10082FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
10083{
10084 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10085 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
10086}
10087
10088
10089/* Opcode 0xf3 0x0f 0xdd - invalid */
10090/* Opcode 0xf2 0x0f 0xdd - invalid */
10091
10092/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
10093FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
10094{
10095 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10096 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
10097}
10098
10099
10100/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
10101FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
10102{
10103 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10104 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
10105}
10106
10107/* Opcode 0xf3 0x0f 0xde - invalid */
10108/* Opcode 0xf2 0x0f 0xde - invalid */
10109
10110
10111/** Opcode 0x0f 0xdf - pandn Pq, Qq */
10112FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
10113{
10114 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10115 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
10116}
10117
10118
10119/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10120FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10121{
10122 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10123 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10124}
10125
10126
10127/* Opcode 0xf3 0x0f 0xdf - invalid */
10128/* Opcode 0xf2 0x0f 0xdf - invalid */
10129
10130/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10131FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
10132{
10133 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10134 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
10135}
10136
10137
10138/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10139FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
10140{
10141 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10142 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
10143}
10144
10145
10146/* Opcode 0xf3 0x0f 0xe0 - invalid */
10147/* Opcode 0xf2 0x0f 0xe0 - invalid */
10148
10149/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10150FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10151{
10152 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10153 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10154}
10155
10156
10157/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10158FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10159{
10160 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10161 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10162}
10163
10164
10165/* Opcode 0xf3 0x0f 0xe1 - invalid */
10166/* Opcode 0xf2 0x0f 0xe1 - invalid */
10167
10168/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10169FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10170{
10171 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10172 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10173}
10174
10175
10176/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10177FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10178{
10179 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10180 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10181}
10182
10183
10184/* Opcode 0xf3 0x0f 0xe2 - invalid */
10185/* Opcode 0xf2 0x0f 0xe2 - invalid */
10186
10187/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10188FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
10189{
10190 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10191 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
10192}
10193
10194
10195/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10196FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
10197{
10198 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10199 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
10200}
10201
10202
10203/* Opcode 0xf3 0x0f 0xe3 - invalid */
10204/* Opcode 0xf2 0x0f 0xe3 - invalid */
10205
10206/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10207FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
10208{
10209 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10210 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
10211}
10212
10213
10214/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10215FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
10216{
10217 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10218 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
10219}
10220
10221
10222/* Opcode 0xf3 0x0f 0xe4 - invalid */
10223/* Opcode 0xf2 0x0f 0xe4 - invalid */
10224
10225/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10226FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10227{
10228 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10229 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10230}
10231
10232
10233/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10234FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10235{
10236 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10237 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10238}
10239
10240
10241/* Opcode 0xf3 0x0f 0xe5 - invalid */
10242/* Opcode 0xf2 0x0f 0xe5 - invalid */
10243
10244/* Opcode 0x0f 0xe6 - invalid */
10245/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10246FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10247/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10248FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10249/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10250FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10251
10252
10253/**
10254 * @opcode 0xe7
10255 * @opcodesub !11 mr/reg
10256 * @oppfx none
10257 * @opcpuid sse
10258 * @opgroup og_sse1_cachect
10259 * @opxcpttype none
10260 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10261 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10262 */
10263FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10264{
10265 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10267 if (IEM_IS_MODRM_MEM_MODE(bRm))
10268 {
10269 /* Register, memory. */
10270 IEM_MC_BEGIN(0, 2);
10271 IEM_MC_LOCAL(uint64_t, uSrc);
10272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10273
10274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10276 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10277 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10278
10279 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10280 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10281 IEM_MC_FPU_TO_MMX_MODE();
10282
10283 IEM_MC_ADVANCE_RIP();
10284 IEM_MC_END();
10285 return VINF_SUCCESS;
10286 }
10287 /**
10288 * @opdone
10289 * @opmnemonic ud0fe7reg
10290 * @opcode 0xe7
10291 * @opcodesub 11 mr/reg
10292 * @oppfx none
10293 * @opunused immediate
10294 * @opcpuid sse
10295 * @optest ->
10296 */
10297 return IEMOP_RAISE_INVALID_OPCODE();
10298}
10299
10300/**
10301 * @opcode 0xe7
10302 * @opcodesub !11 mr/reg
10303 * @oppfx 0x66
10304 * @opcpuid sse2
10305 * @opgroup og_sse2_cachect
10306 * @opxcpttype 1
10307 * @optest op1=-1 op2=2 -> op1=2
10308 * @optest op1=0 op2=-42 -> op1=-42
10309 */
10310FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10311{
10312 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10314 if (IEM_IS_MODRM_MEM_MODE(bRm))
10315 {
10316 /* Register, memory. */
10317 IEM_MC_BEGIN(0, 2);
10318 IEM_MC_LOCAL(RTUINT128U, uSrc);
10319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10320
10321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10323 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10324 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10325
10326 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10327 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10328
10329 IEM_MC_ADVANCE_RIP();
10330 IEM_MC_END();
10331 return VINF_SUCCESS;
10332 }
10333
10334 /**
10335 * @opdone
10336 * @opmnemonic ud660fe7reg
10337 * @opcode 0xe7
10338 * @opcodesub 11 mr/reg
10339 * @oppfx 0x66
10340 * @opunused immediate
10341 * @opcpuid sse
10342 * @optest ->
10343 */
10344 return IEMOP_RAISE_INVALID_OPCODE();
10345}
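/*
 * Both movntq and movntdq are non-temporal store hints which IEM implements
 * as plain stores, except that movntdq additionally requires a 16 byte
 * aligned operand (IEM_MC_STORE_MEM_U128_ALIGN_SSE faults otherwise).
 */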
10346
10347/* Opcode 0xf3 0x0f 0xe7 - invalid */
10348/* Opcode 0xf2 0x0f 0xe7 - invalid */
10349
10350
10351/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10352FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10353{
10354 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10355 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10356}
10357
10358
10359/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10360FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10361{
10362 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10363 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10364}
10365
10366
10367/* Opcode 0xf3 0x0f 0xe8 - invalid */
10368/* Opcode 0xf2 0x0f 0xe8 - invalid */
10369
10370/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10371FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10372{
10373 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10374 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10375}
10376
10377
10378/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10379FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10380{
10381 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10382 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10383}
10384
10385
10386/* Opcode 0xf3 0x0f 0xe9 - invalid */
10387/* Opcode 0xf2 0x0f 0xe9 - invalid */
10388
10389
10390/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10391FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
10392{
10393 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10394 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
10395}
10396
10397
10398/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10399FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
10400{
10401 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10402 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
10403}
10404
10405
10406/* Opcode 0xf3 0x0f 0xea - invalid */
10407/* Opcode 0xf2 0x0f 0xea - invalid */
10408
10409
10410/** Opcode 0x0f 0xeb - por Pq, Qq */
10411FNIEMOP_DEF(iemOp_por_Pq_Qq)
10412{
10413 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10414 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
10415}
10416
10417
10418/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
10419FNIEMOP_DEF(iemOp_por_Vx_Wx)
10420{
10421 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10422 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
10423}
10424
10425
10426/* Opcode 0xf3 0x0f 0xeb - invalid */
10427/* Opcode 0xf2 0x0f 0xeb - invalid */
10428
10429/** Opcode 0x0f 0xec - paddsb Pq, Qq */
10430FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
10431{
10432 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10433 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
10434}
10435
10436
10437/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
10438FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
10439{
10440 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10441 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
10442}
10443
10444
10445/* Opcode 0xf3 0x0f 0xec - invalid */
10446/* Opcode 0xf2 0x0f 0xec - invalid */
10447
10448/** Opcode 0x0f 0xed - paddsw Pq, Qq */
10449FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
10450{
10451 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10452 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
10453}
10454
10455
10456/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
10457FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
10458{
10459 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10460 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
10461}
10462
10463
10464/* Opcode 0xf3 0x0f 0xed - invalid */
10465/* Opcode 0xf2 0x0f 0xed - invalid */
10466
10467
10468/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
10469FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
10470{
10471 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10472 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
10473}
10474
10475
10476/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
10477FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
10478{
10479 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10480 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
10481}
10482
10483
10484/* Opcode 0xf3 0x0f 0xee - invalid */
10485/* Opcode 0xf2 0x0f 0xee - invalid */
10486
10487
10488/** Opcode 0x0f 0xef - pxor Pq, Qq */
10489FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
10490{
10491 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10492 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
10493}
10494
10495
10496/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
10497FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
10498{
10499 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10500 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
10501}
10502
10503
10504/* Opcode 0xf3 0x0f 0xef - invalid */
10505/* Opcode 0xf2 0x0f 0xef - invalid */
10506
10507/* Opcode 0x0f 0xf0 - invalid */
10508/* Opcode 0x66 0x0f 0xf0 - invalid */
10509/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
10510FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
10511
10512
10513/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
10514FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
10515{
10516 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10517 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
10518}
10519
10520
10521/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
10522FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
10523{
10524 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10525 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
10526}
10527
10528
10529/* Opcode 0xf2 0x0f 0xf1 - invalid */
10530
10531/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
10532FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
10533{
10534 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10535 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
10536}
10537
10538
10539/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
10540FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
10541{
10542 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10543 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
10544}
10545
10546
10547/* Opcode 0xf2 0x0f 0xf2 - invalid */
10548
10549/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
10550FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
10551{
10552 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10553 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
10554}
10555
10556
10557/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
10558FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
10559{
10560 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10561 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
10562}
10563
10564/* Opcode 0xf2 0x0f 0xf3 - invalid */
10565
10566/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10567FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
10568{
10569 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10570 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10571}
10572
10573
10574/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
10575FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
10576{
10577 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10578 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
10579}
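/*
 * Informal sketch of pmuludq: an unsigned 32 x 32 -> 64 bit multiply of the
 * low dword of each qword lane, i.e. for the MMX form roughly:
 *
 *      *puDst = (uint64_t)(uint32_t)*puDst * (uint32_t)*puSrc;
 *
 * The SSE2 form does the same independently for both 64-bit lanes.
 */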
10580
10581
10582/* Opcode 0xf2 0x0f 0xf4 - invalid */
10583
10584/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
10585FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
10586{
10587 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10588 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
10589}
10590
10591
10592/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
10593FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
10594{
10595 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10596 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
10597}
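/*
 * Informal sketch of pmaddwd: signed 16 x 16 -> 32 bit multiplies of the
 * corresponding words, with adjacent products summed into dword results:
 *
 *      ai32Dst[i] = (int32_t)ai16Dst[2 * i]     * ai16Src[2 * i]
 *                 + (int32_t)ai16Dst[2 * i + 1] * ai16Src[2 * i + 1];
 */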
10598
10599/* Opcode 0xf2 0x0f 0xf5 - invalid */
10600
10601/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
10602FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
10603{
10604 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10605 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
10606}
10607
10608
10609/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
10610FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
10611{
10612 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10613 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
10614}
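/*
 * Informal sketch of psadbw, the sum of absolute byte differences:
 *
 *      uSum = 0;
 *      for (unsigned iByte = 0; iByte < 8; iByte++)
 *          uSum += RT_ABS((int)pbDst[iByte] - (int)pbSrc[iByte]);
 *
 * The 16-bit sum is zero extended to the full quadword; the SSE2 form
 * produces one such sum per 64-bit lane.
 */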
10615
10616
10617/* Opcode 0xf2 0x0f 0xf6 - invalid */
10618
10619/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
10620FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
10621/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
10622FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
10623/* Opcode 0xf2 0x0f 0xf7 - invalid */
10624
10625
10626/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
10627FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
10628{
10629 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10630 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
10631}
10632
10633
10634/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
10635FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
10636{
10637 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10638 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
10639}
10640
10641
10642/* Opcode 0xf2 0x0f 0xf8 - invalid */
10643
10644
10645/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
10646FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
10647{
10648 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10649 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
10650}
10651
10652
10653/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
10654FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
10655{
10656 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10657 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
10658}
10659
10660
10661/* Opcode 0xf2 0x0f 0xf9 - invalid */
10662
10663
10664/** Opcode 0x0f 0xfa - psubd Pq, Qq */
10665FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
10666{
10667 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10668 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
10669}
10670
10671
10672/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
10673FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
10674{
10675 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10676 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
10677}
10678
10679
10680/* Opcode 0xf2 0x0f 0xfa - invalid */
10681
10682
10683/** Opcode 0x0f 0xfb - psubq Pq, Qq */
10684FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
10685{
10686 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10687 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10688}
10689
10690
10691/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
10692FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
10693{
10694 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10695 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
10696}
10697
10698
10699/* Opcode 0xf2 0x0f 0xfb - invalid */
10700
10701
10702/** Opcode 0x0f 0xfc - paddb Pq, Qq */
10703FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
10704{
10705 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10706 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
10707}
10708
10709
10710/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
10711FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
10712{
10713 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10714 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
10715}
10716
10717
10718/* Opcode 0xf2 0x0f 0xfc - invalid */
10719
10720
10721/** Opcode 0x0f 0xfd - paddw Pq, Qq */
10722FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
10723{
10724 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10725 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
10726}
10727
10728
10729/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
10730FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
10731{
10732 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10733 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
10734}
10735
10736
10737/* Opcode 0xf2 0x0f 0xfd - invalid */
10738
10739
10740/** Opcode 0x0f 0xfe - paddd Pq, Qq */
10741FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
10742{
10743 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10744 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
10745}
10746
10747
10748/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
10749FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
10750{
10751 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10752 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
10753}
10754
10755
10756/* Opcode 0xf2 0x0f 0xfe - invalid */
10757
10758
10759/** Opcode **** 0x0f 0xff - UD0 */
10760FNIEMOP_DEF(iemOp_ud0)
10761{
10762 IEMOP_MNEMONIC(ud0, "ud0");
10763 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
10764 {
10765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
10766#ifndef TST_IEM_CHECK_MC
10767 if (IEM_IS_MODRM_MEM_MODE(bRm))
10768 {
10769 RTGCPTR GCPtrEff;
10770 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
10771 if (rcStrict != VINF_SUCCESS)
10772 return rcStrict;
10773 }
10774#endif
10775 IEMOP_HLP_DONE_DECODING();
10776 }
10777 return IEMOP_RAISE_INVALID_OPCODE();
10778}
10779
10780
10781
10782/**
10783 * Two byte opcode map, first byte 0x0f.
10784 *
10785 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
10786 * check if it needs updating as well when making changes.
10787 */
10788IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
10789{
10790 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);

/** @} */