/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96109 2022-08-08 11:41:33Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
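
/*
 * Usage sketch: opcode handlers defer to the worker above, passing the
 * instruction's assembly-level helper.  The handler below is illustrative
 * only ('pxxx' is a hypothetical mnemonic), but it follows the calling
 * convention the real handlers in this file use:
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 *      }
 */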


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
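
/*
 * Usage sketch for the SSE2 worker above; the handler shape mirrors the MMX
 * case, only the helper operates on 128-bit XMM values ('pxxx' is again a
 * hypothetical stand-in):
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx");
 *          return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxxx_u128);
 *      }
 */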


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
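
/*
 * Note (illustrative): instructions taking this LowLow worker, e.g. an
 * interleave like 'punpcklbw mm1, mem32', only use the low half of the
 * source operand, which is why the memory path above fetches just 32 bits
 * and zero-extends them (IEM_MC_FETCH_MEM_U32_ZX_U64) rather than loading
 * a full qword.
 */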


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where the CPU may read the full 128 bits or
 * only the low 64 bits, for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where the CPU may read the full 128 bits or
 * only the low 64 bits, for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
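
/*
 * Usage sketch for the HighHigh workers above: the unpack-high instructions
 * can defer to them with a matching 128-bit helper.  The handler and helper
 * names below are illustrative, not verbatim from this file:
 *
 *      FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC(unpckhps, "unpckhps Vx,Wx");
 *          return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
 *      }
 */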


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
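
/*
 * Dispatch sketch (worked example): the ModR/M reg field (bits 5:3) indexes
 * the table above.  For the byte sequence 0F 00 D8, bRm = 0xD8 = 11 011 000b,
 * so IEM_GET_MODRM_REG_8 yields (0xD8 >> 3) & 7 = 3 and the call lands on
 * iemOp_Grp6_ltr, i.e. 'ltr ax'.
 */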


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
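
/*
 * Encoding sketch (worked example): register forms of group 7 are special
 * cased above.  For 0F 01 D9, bRm = 0xD9 = 11 011 001b: mod = 3 selects the
 * register path, and reg = 3 with rm = 1 dispatches to
 * iemOp_Grp7_Amd_vmmcall, while a memory operand with reg = 3 would have
 * gone to iemOp_Grp7_lidt via g_apfnGroup7Mem.
 */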

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1809
1810
1811/**
1812 * @opcode 0x10
1813 * @oppfx 0xf3
1814 * @opcpuid sse
1815 * @opgroup og_sse_simdfp_datamove
1816 * @opxcpttype 5
1817 * @optest op1=1 op2=2 -> op1=2
1818 * @optest op1=0 op2=-22 -> op1=-22
1819 */
1820FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1821{
1822 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824 if (IEM_IS_MODRM_REG_MODE(bRm))
1825 {
1826 /*
1827 * Register, register.
1828 */
1829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1830 IEM_MC_BEGIN(0, 1);
1831 IEM_MC_LOCAL(uint32_t, uSrc);
1832
1833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1835 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1836 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1837
1838 IEM_MC_ADVANCE_RIP();
1839 IEM_MC_END();
1840 }
1841 else
1842 {
1843 /*
1844 * Register, memory.
1845 */
1846 IEM_MC_BEGIN(0, 2);
1847 IEM_MC_LOCAL(uint32_t, uSrc);
1848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1849
1850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1852 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1853 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1854
1855 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1856 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1857
1858 IEM_MC_ADVANCE_RIP();
1859 IEM_MC_END();
1860 }
1861 return VINF_SUCCESS;
1862}
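
/* Note: the asymmetry above is architectural: the register form merges the
   low dword and leaves bits 32..127 of the destination untouched, while the
   memory form zero-extends through bit 127 (hence the VssZx_WO operand and
   the IEM_MC_STORE_XREG_U32_ZX_U128 call). The movsd form below behaves the
   same way for the low qword. */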
1863
1864
1865/**
1866 * @opcode 0x10
1867 * @oppfx 0xf2
1868 * @opcpuid sse2
1869 * @opgroup og_sse2_pcksclr_datamove
1870 * @opxcpttype 5
1871 * @optest op1=1 op2=2 -> op1=2
1872 * @optest op1=0 op2=-42 -> op1=-42
1873 */
1874FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1875{
1876 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 if (IEM_IS_MODRM_REG_MODE(bRm))
1879 {
1880 /*
1881 * Register, register.
1882 */
1883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1884 IEM_MC_BEGIN(0, 1);
1885 IEM_MC_LOCAL(uint64_t, uSrc);
1886
1887 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1888 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1889 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1890 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1891
1892 IEM_MC_ADVANCE_RIP();
1893 IEM_MC_END();
1894 }
1895 else
1896 {
1897 /*
1898 * Register, memory.
1899 */
1900 IEM_MC_BEGIN(0, 2);
1901 IEM_MC_LOCAL(uint64_t, uSrc);
1902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1903
1904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1907 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1908
1909 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1910 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1911
1912 IEM_MC_ADVANCE_RIP();
1913 IEM_MC_END();
1914 }
1915 return VINF_SUCCESS;
1916}
1917
1918
1919/**
1920 * @opcode 0x11
1921 * @oppfx none
1922 * @opcpuid sse
1923 * @opgroup og_sse_simdfp_datamove
1924 * @opxcpttype 4UA
1925 * @optest op1=1 op2=2 -> op1=2
1926 * @optest op1=0 op2=-42 -> op1=-42
1927 */
1928FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1929{
1930 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1932 if (IEM_IS_MODRM_REG_MODE(bRm))
1933 {
1934 /*
1935 * Register, register.
1936 */
1937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1938 IEM_MC_BEGIN(0, 0);
1939 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1941 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
1942 IEM_GET_MODRM_REG(pVCpu, bRm));
1943 IEM_MC_ADVANCE_RIP();
1944 IEM_MC_END();
1945 }
1946 else
1947 {
1948 /*
1949 * Memory, register.
1950 */
1951 IEM_MC_BEGIN(0, 2);
1952 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1954
1955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1957 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1958 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1959
1960 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1961 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1962
1963 IEM_MC_ADVANCE_RIP();
1964 IEM_MC_END();
1965 }
1966 return VINF_SUCCESS;
1967}
1968
1969
1970/**
1971 * @opcode 0x11
1972 * @oppfx 0x66
1973 * @opcpuid sse2
1974 * @opgroup og_sse2_pcksclr_datamove
1975 * @opxcpttype 4UA
1976 * @optest op1=1 op2=2 -> op1=2
1977 * @optest op1=0 op2=-42 -> op1=-42
1978 */
1979FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1980{
1981 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1983 if (IEM_IS_MODRM_REG_MODE(bRm))
1984 {
1985 /*
1986 * Register, register.
1987 */
1988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1989 IEM_MC_BEGIN(0, 0);
1990 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1991 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1992 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
1993 IEM_GET_MODRM_REG(pVCpu, bRm));
1994 IEM_MC_ADVANCE_RIP();
1995 IEM_MC_END();
1996 }
1997 else
1998 {
1999 /*
2000 * Memory, register.
2001 */
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2008 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2010
2011 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2012 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP();
2015 IEM_MC_END();
2016 }
2017 return VINF_SUCCESS;
2018}
2019
2020
2021/**
2022 * @opcode 0x11
2023 * @oppfx 0xf3
2024 * @opcpuid sse
2025 * @opgroup og_sse_simdfp_datamove
2026 * @opxcpttype 5
2027 * @optest op1=1 op2=2 -> op1=2
2028 * @optest op1=0 op2=-22 -> op1=-22
2029 */
2030FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2031{
2032 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2034 if (IEM_IS_MODRM_REG_MODE(bRm))
2035 {
2036 /*
2037 * Register, register.
2038 */
2039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2040 IEM_MC_BEGIN(0, 1);
2041 IEM_MC_LOCAL(uint32_t, uSrc);
2042
2043 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2044 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2045 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2046 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2047
2048 IEM_MC_ADVANCE_RIP();
2049 IEM_MC_END();
2050 }
2051 else
2052 {
2053 /*
2054 * Memory, register.
2055 */
2056 IEM_MC_BEGIN(0, 2);
2057 IEM_MC_LOCAL(uint32_t, uSrc);
2058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2059
2060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2063 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2064
2065 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2066 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2067
2068 IEM_MC_ADVANCE_RIP();
2069 IEM_MC_END();
2070 }
2071 return VINF_SUCCESS;
2072}
2073
2074
2075/**
2076 * @opcode 0x11
2077 * @oppfx 0xf2
2078 * @opcpuid sse2
2079 * @opgroup og_sse2_pcksclr_datamove
2080 * @opxcpttype 5
2081 * @optest op1=1 op2=2 -> op1=2
2082 * @optest op1=0 op2=-42 -> op1=-42
2083 */
2084FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2085{
2086 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2088 if (IEM_IS_MODRM_REG_MODE(bRm))
2089 {
2090 /*
2091 * Register, register.
2092 */
2093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2094 IEM_MC_BEGIN(0, 1);
2095 IEM_MC_LOCAL(uint64_t, uSrc);
2096
2097 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2098 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2099 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2100 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2101
2102 IEM_MC_ADVANCE_RIP();
2103 IEM_MC_END();
2104 }
2105 else
2106 {
2107 /*
2108 * Memory, register.
2109 */
2110 IEM_MC_BEGIN(0, 2);
2111 IEM_MC_LOCAL(uint64_t, uSrc);
2112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2113
2114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2116 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2117 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2118
2119 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2120 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2121
2122 IEM_MC_ADVANCE_RIP();
2123 IEM_MC_END();
2124 }
2125 return VINF_SUCCESS;
2126}
2127
2128
2129FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2130{
2131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2132 if (IEM_IS_MODRM_REG_MODE(bRm))
2133 {
2134 /**
2135 * @opcode 0x12
2136 * @opcodesub 11 mr/reg
2137 * @oppfx none
2138 * @opcpuid sse
2139 * @opgroup og_sse_simdfp_datamove
2140 * @opxcpttype 5
2141 * @optest op1=1 op2=2 -> op1=2
2142 * @optest op1=0 op2=-42 -> op1=-42
2143 */
2144 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2145
2146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2147 IEM_MC_BEGIN(0, 1);
2148 IEM_MC_LOCAL(uint64_t, uSrc);
2149
2150 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2151 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2152 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2153 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2154
2155 IEM_MC_ADVANCE_RIP();
2156 IEM_MC_END();
2157 }
2158 else
2159 {
2160 /**
2161 * @opdone
2162 * @opcode 0x12
2163 * @opcodesub !11 mr/reg
2164 * @oppfx none
2165 * @opcpuid sse
2166 * @opgroup og_sse_simdfp_datamove
2167 * @opxcpttype 5
2168 * @optest op1=1 op2=2 -> op1=2
2169 * @optest op1=0 op2=-42 -> op1=-42
2170 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2171 */
2172 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2173
2174 IEM_MC_BEGIN(0, 2);
2175 IEM_MC_LOCAL(uint64_t, uSrc);
2176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2177
2178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2180 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2181 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2182
2183 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2184 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2185
2186 IEM_MC_ADVANCE_RIP();
2187 IEM_MC_END();
2188 }
2189 return VINF_SUCCESS;
2190}
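
/* Note: 0x0f 0x12 is two mnemonics in one: with a register operand it is
   MOVHLPS (the high qword of the source is copied into the low qword of
   the destination), with a memory operand it is MOVLPS (the low qword is
   loaded while the high qword is left untouched). */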
2191
2192
2193/**
2194 * @opcode 0x12
2195 * @opcodesub !11 mr/reg
2196 * @oppfx 0x66
2197 * @opcpuid sse2
2198 * @opgroup og_sse2_pcksclr_datamove
2199 * @opxcpttype 5
2200 * @optest op1=1 op2=2 -> op1=2
2201 * @optest op1=0 op2=-42 -> op1=-42
2202 */
2203FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2204{
2205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2206 if (IEM_IS_MODRM_MEM_MODE(bRm))
2207 {
2208 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2209
2210 IEM_MC_BEGIN(0, 2);
2211 IEM_MC_LOCAL(uint64_t, uSrc);
2212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2213
2214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2216 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2217 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2218
2219 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2220 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2221
2222 IEM_MC_ADVANCE_RIP();
2223 IEM_MC_END();
2224 return VINF_SUCCESS;
2225 }
2226
2227 /**
2228 * @opdone
2229 * @opmnemonic ud660f12m3
2230 * @opcode 0x12
2231 * @opcodesub 11 mr/reg
2232 * @oppfx 0x66
2233 * @opunused immediate
2234 * @opcpuid sse
2235 * @optest ->
2236 */
2237 return IEMOP_RAISE_INVALID_OPCODE();
2238}
2239
2240
2241/**
2242 * @opcode 0x12
2243 * @oppfx 0xf3
2244 * @opcpuid sse3
2245 * @opgroup og_sse3_pcksclr_datamove
2246 * @opxcpttype 4
2247 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2248 * op1=0x00000002000000020000000100000001
2249 */
2250FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2251{
2252 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2254 if (IEM_IS_MODRM_REG_MODE(bRm))
2255 {
2256 /*
2257 * Register, register.
2258 */
2259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2260 IEM_MC_BEGIN(2, 0);
2261 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2262 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2263
2264 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2265 IEM_MC_PREPARE_SSE_USAGE();
2266
2267 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2268 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2269 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2270
2271 IEM_MC_ADVANCE_RIP();
2272 IEM_MC_END();
2273 }
2274 else
2275 {
2276 /*
2277 * Register, memory.
2278 */
2279 IEM_MC_BEGIN(2, 2);
2280 IEM_MC_LOCAL(RTUINT128U, uSrc);
2281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2282 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2283 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2284
2285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2288 IEM_MC_PREPARE_SSE_USAGE();
2289
2290 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2291 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2292 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2293
2294 IEM_MC_ADVANCE_RIP();
2295 IEM_MC_END();
2296 }
2297 return VINF_SUCCESS;
2298}
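
/* Note: movsldup replicates the even-indexed source dwords, i.e. the
   result is { src0, src0, src2, src2 } as the @optest values above show;
   movshdup (0xf3 0x0f 0x16) is the odd-indexed counterpart. */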
2299
2300
2301/**
2302 * @opcode 0x12
2303 * @oppfx 0xf2
2304 * @opcpuid sse3
2305 * @opgroup og_sse3_pcksclr_datamove
2306 * @opxcpttype 5
2307 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2308 * op1=0x22222222111111112222222211111111
2309 */
2310FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2311{
2312 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2314 if (IEM_IS_MODRM_REG_MODE(bRm))
2315 {
2316 /*
2317 * Register, register.
2318 */
2319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2320 IEM_MC_BEGIN(2, 0);
2321 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2322 IEM_MC_ARG(uint64_t, uSrc, 1);
2323
2324 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2325 IEM_MC_PREPARE_SSE_USAGE();
2326
2327 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2328 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2329 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2330
2331 IEM_MC_ADVANCE_RIP();
2332 IEM_MC_END();
2333 }
2334 else
2335 {
2336 /*
2337 * Register, memory.
2338 */
2339 IEM_MC_BEGIN(2, 2);
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2342 IEM_MC_ARG(uint64_t, uSrc, 1);
2343
2344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2346 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2347 IEM_MC_PREPARE_SSE_USAGE();
2348
2349 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2350 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP();
2354 IEM_MC_END();
2355 }
2356 return VINF_SUCCESS;
2357}
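
/* Note: movddup duplicates the low source qword into both halves of the
   destination. Only 64 bits are read, so the memory form uses a plain
   qword fetch (exception type 5) and skips the 16-byte alignment check
   that the full-width movsldup fetch performs. */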
2358
2359
2360/**
2361 * @opcode 0x13
2362 * @opcodesub !11 mr/reg
2363 * @oppfx none
2364 * @opcpuid sse
2365 * @opgroup og_sse_simdfp_datamove
2366 * @opxcpttype 5
2367 * @optest op1=1 op2=2 -> op1=2
2368 * @optest op1=0 op2=-42 -> op1=-42
2369 */
2370FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2371{
2372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2373 if (IEM_IS_MODRM_MEM_MODE(bRm))
2374 {
2375 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2376
2377 IEM_MC_BEGIN(0, 2);
2378 IEM_MC_LOCAL(uint64_t, uSrc);
2379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2380
2381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2385
2386 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2387 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2388
2389 IEM_MC_ADVANCE_RIP();
2390 IEM_MC_END();
2391 return VINF_SUCCESS;
2392 }
2393
2394 /**
2395 * @opdone
2396 * @opmnemonic ud0f13m3
2397 * @opcode 0x13
2398 * @opcodesub 11 mr/reg
2399 * @oppfx none
2400 * @opunused immediate
2401 * @opcpuid sse
2402 * @optest ->
2403 */
2404 return IEMOP_RAISE_INVALID_OPCODE();
2405}
2406
2407
2408/**
2409 * @opcode 0x13
2410 * @opcodesub !11 mr/reg
2411 * @oppfx 0x66
2412 * @opcpuid sse2
2413 * @opgroup og_sse2_pcksclr_datamove
2414 * @opxcpttype 5
2415 * @optest op1=1 op2=2 -> op1=2
2416 * @optest op1=0 op2=-42 -> op1=-42
2417 */
2418FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2419{
2420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2421 if (IEM_IS_MODRM_MEM_MODE(bRm))
2422 {
2423 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2424 IEM_MC_BEGIN(0, 2);
2425 IEM_MC_LOCAL(uint64_t, uSrc);
2426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2427
2428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2431 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2432
2433 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2434 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2435
2436 IEM_MC_ADVANCE_RIP();
2437 IEM_MC_END();
2438 return VINF_SUCCESS;
2439 }
2440
2441 /**
2442 * @opdone
2443 * @opmnemonic ud660f13m3
2444 * @opcode 0x13
2445 * @opcodesub 11 mr/reg
2446 * @oppfx 0x66
2447 * @opunused immediate
2448 * @opcpuid sse
2449 * @optest ->
2450 */
2451 return IEMOP_RAISE_INVALID_OPCODE();
2452}
2453
2454
2455/**
2456 * @opmnemonic udf30f13
2457 * @opcode 0x13
2458 * @oppfx 0xf3
2459 * @opunused intel-modrm
2460 * @opcpuid sse
2461 * @optest ->
2462 * @opdone
2463 */
2464
2465/**
2466 * @opmnemonic udf20f13
2467 * @opcode 0x13
2468 * @oppfx 0xf2
2469 * @opunused intel-modrm
2470 * @opcpuid sse
2471 * @optest ->
2472 * @opdone
2473 */
2474
2475/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2476FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2477{
2478 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2479 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2480}
2481
2482
2483/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2484FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2485{
2486 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2487 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2488}
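
/* Note: the LowLow_To_Full worker naming describes the operation: the low
   halves of both operands are interleaved into a full-width result, e.g.
   unpcklps produces { dst0, src0, dst1, src1 }; the HighHigh_To_Full
   workers for 0x0f 0x15 below do the same with the upper halves. */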
2489
2490
2491/**
2492 * @opdone
2493 * @opmnemonic udf30f14
2494 * @opcode 0x14
2495 * @oppfx 0xf3
2496 * @opunused intel-modrm
2497 * @opcpuid sse
2498 * @optest ->
2499 * @opdone
2500 */
2501
2502/**
2503 * @opmnemonic udf20f14
2504 * @opcode 0x14
2505 * @oppfx 0xf2
2506 * @opunused intel-modrm
2507 * @opcpuid sse
2508 * @optest ->
2509 * @opdone
2510 */
2511
2512/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2513FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2514{
2515 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2516 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2517}
2518
2519
2520/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2521FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2522{
2523 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2524 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2525}
2526
2527
2528/* Opcode 0xf3 0x0f 0x15 - invalid */
2529/* Opcode 0xf2 0x0f 0x15 - invalid */
2530
2531/**
2532 * @opdone
2533 * @opmnemonic udf30f15
2534 * @opcode 0x15
2535 * @oppfx 0xf3
2536 * @opunused intel-modrm
2537 * @opcpuid sse
2538 * @optest ->
2539 * @opdone
2540 */
2541
2542/**
2543 * @opmnemonic udf20f15
2544 * @opcode 0x15
2545 * @oppfx 0xf2
2546 * @opunused intel-modrm
2547 * @opcpuid sse
2548 * @optest ->
2549 * @opdone
2550 */
2551
2552FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2553{
2554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2555 if (IEM_IS_MODRM_REG_MODE(bRm))
2556 {
2557 /**
2558 * @opcode 0x16
2559 * @opcodesub 11 mr/reg
2560 * @oppfx none
2561 * @opcpuid sse
2562 * @opgroup og_sse_simdfp_datamove
2563 * @opxcpttype 5
2564 * @optest op1=1 op2=2 -> op1=2
2565 * @optest op1=0 op2=-42 -> op1=-42
2566 */
2567 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2568
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2570 IEM_MC_BEGIN(0, 1);
2571 IEM_MC_LOCAL(uint64_t, uSrc);
2572
2573 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2574 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2575 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2576 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 else
2582 {
2583 /**
2584 * @opdone
2585 * @opcode 0x16
2586 * @opcodesub !11 mr/reg
2587 * @oppfx none
2588 * @opcpuid sse
2589 * @opgroup og_sse_simdfp_datamove
2590 * @opxcpttype 5
2591 * @optest op1=1 op2=2 -> op1=2
2592 * @optest op1=0 op2=-42 -> op1=-42
2593 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2594 */
2595 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2596
2597 IEM_MC_BEGIN(0, 2);
2598 IEM_MC_LOCAL(uint64_t, uSrc);
2599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2600
2601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2603 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2605
2606 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2607 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2608
2609 IEM_MC_ADVANCE_RIP();
2610 IEM_MC_END();
2611 }
2612 return VINF_SUCCESS;
2613}
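
/* Note: 0x0f 0x16 mirrors 0x0f 0x12 above: the register form is MOVLHPS
   (low qword of the source into the high qword of the destination), the
   memory form is MOVHPS (64-bit load into the high qword). */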
2614
2615
2616/**
2617 * @opcode 0x16
2618 * @opcodesub !11 mr/reg
2619 * @oppfx 0x66
2620 * @opcpuid sse2
2621 * @opgroup og_sse2_pcksclr_datamove
2622 * @opxcpttype 5
2623 * @optest op1=1 op2=2 -> op1=2
2624 * @optest op1=0 op2=-42 -> op1=-42
2625 */
2626FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2627{
2628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2629 if (IEM_IS_MODRM_MEM_MODE(bRm))
2630 {
2631 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2632 IEM_MC_BEGIN(0, 2);
2633 IEM_MC_LOCAL(uint64_t, uSrc);
2634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2635
2636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2640
2641 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2642 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2643
2644 IEM_MC_ADVANCE_RIP();
2645 IEM_MC_END();
2646 return VINF_SUCCESS;
2647 }
2648
2649 /**
2650 * @opdone
2651 * @opmnemonic ud660f16m3
2652 * @opcode 0x16
2653 * @opcodesub 11 mr/reg
2654 * @oppfx 0x66
2655 * @opunused immediate
2656 * @opcpuid sse
2657 * @optest ->
2658 */
2659 return IEMOP_RAISE_INVALID_OPCODE();
2660}
2661
2662
2663/**
2664 * @opcode 0x16
2665 * @oppfx 0xf3
2666 * @opcpuid sse3
2667 * @opgroup og_sse3_pcksclr_datamove
2668 * @opxcpttype 4
2669 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2670 * op1=0x00000002000000020000000100000001
2671 */
2672FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2673{
2674 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2676 if (IEM_IS_MODRM_REG_MODE(bRm))
2677 {
2678 /*
2679 * Register, register.
2680 */
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_BEGIN(2, 0);
2683 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2684 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2685
2686 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2687 IEM_MC_PREPARE_SSE_USAGE();
2688
2689 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2690 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2691 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2692
2693 IEM_MC_ADVANCE_RIP();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 /*
2699 * Register, memory.
2700 */
2701 IEM_MC_BEGIN(2, 2);
2702 IEM_MC_LOCAL(RTUINT128U, uSrc);
2703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2704 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2705 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2706
2707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2710 IEM_MC_PREPARE_SSE_USAGE();
2711
2712 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2713 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2715
2716 IEM_MC_ADVANCE_RIP();
2717 IEM_MC_END();
2718 }
2719 return VINF_SUCCESS;
2720}
2721
2722/**
2723 * @opdone
2724 * @opmnemonic udf20f16
2725 * @opcode 0x16
2726 * @oppfx 0xf2
2727 * @opunused intel-modrm
2728 * @opcpuid sse
2729 * @optest ->
2730 * @opdone
2731 */
2732
2733
2734/**
2735 * @opcode 0x17
2736 * @opcodesub !11 mr/reg
2737 * @oppfx none
2738 * @opcpuid sse
2739 * @opgroup og_sse_simdfp_datamove
2740 * @opxcpttype 5
2741 * @optest op1=1 op2=2 -> op1=2
2742 * @optest op1=0 op2=-42 -> op1=-42
2743 */
2744FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2745{
2746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2747 if (IEM_IS_MODRM_MEM_MODE(bRm))
2748 {
2749 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2750
2751 IEM_MC_BEGIN(0, 2);
2752 IEM_MC_LOCAL(uint64_t, uSrc);
2753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2754
2755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2757 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2759
2760 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2761 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2762
2763 IEM_MC_ADVANCE_RIP();
2764 IEM_MC_END();
2765 return VINF_SUCCESS;
2766 }
2767
2768 /**
2769 * @opdone
2770 * @opmnemonic ud0f17m3
2771 * @opcode 0x17
2772 * @opcodesub 11 mr/reg
2773 * @oppfx none
2774 * @opunused immediate
2775 * @opcpuid sse
2776 * @optest ->
2777 */
2778 return IEMOP_RAISE_INVALID_OPCODE();
2779}
2780
2781
2782/**
2783 * @opcode 0x17
2784 * @opcodesub !11 mr/reg
2785 * @oppfx 0x66
2786 * @opcpuid sse2
2787 * @opgroup og_sse2_pcksclr_datamove
2788 * @opxcpttype 5
2789 * @optest op1=1 op2=2 -> op1=2
2790 * @optest op1=0 op2=-42 -> op1=-42
2791 */
2792FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2793{
2794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2795 if (IEM_IS_MODRM_MEM_MODE(bRm))
2796 {
2797 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2798
2799 IEM_MC_BEGIN(0, 2);
2800 IEM_MC_LOCAL(uint64_t, uSrc);
2801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2802
2803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2806 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2807
2808 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2809 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2810
2811 IEM_MC_ADVANCE_RIP();
2812 IEM_MC_END();
2813 return VINF_SUCCESS;
2814 }
2815
2816 /**
2817 * @opdone
2818 * @opmnemonic ud660f17m3
2819 * @opcode 0x17
2820 * @opcodesub 11 mr/reg
2821 * @oppfx 0x66
2822 * @opunused immediate
2823 * @opcpuid sse
2824 * @optest ->
2825 */
2826 return IEMOP_RAISE_INVALID_OPCODE();
2827}
2828
2829
2830/**
2831 * @opdone
2832 * @opmnemonic udf30f17
2833 * @opcode 0x17
2834 * @oppfx 0xf3
2835 * @opunused intel-modrm
2836 * @opcpuid sse
2837 * @optest ->
2838 * @opdone
2839 */
2840
2841/**
2842 * @opmnemonic udf20f17
2843 * @opcode 0x17
2844 * @oppfx 0xf2
2845 * @opunused intel-modrm
2846 * @opcpuid sse
2847 * @optest ->
2848 * @opdone
2849 */
2850
2851
2852/** Opcode 0x0f 0x18. */
2853FNIEMOP_DEF(iemOp_prefetch_Grp16)
2854{
2855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2856 if (IEM_IS_MODRM_MEM_MODE(bRm))
2857 {
2858 switch (IEM_GET_MODRM_REG_8(bRm))
2859 {
2860 case 4: /* Aliased to /0 for the time being according to AMD. */
2861 case 5: /* Aliased to /0 for the time being according to AMD. */
2862 case 6: /* Aliased to /0 for the time being according to AMD. */
2863 case 7: /* Aliased to /0 for the time being according to AMD. */
2864 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2865 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2866 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2867 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2869 }
2870
2871 IEM_MC_BEGIN(0, 1);
2872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 /* Currently a NOP. */
2876 NOREF(GCPtrEffSrc);
2877 IEM_MC_ADVANCE_RIP();
2878 IEM_MC_END();
2879 return VINF_SUCCESS;
2880 }
2881
2882 return IEMOP_RAISE_INVALID_OPCODE();
2883}
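
/* Note: emulating the prefetch hints as NOPs is architecturally sound; the
   locality hints (NTA, T0..T2) never change guest-visible state, so the
   decoder only needs to consume the ModRM and displacement bytes. */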
2884
2885
2886/** Opcode 0x0f 0x19..0x1f. */
2887FNIEMOP_DEF(iemOp_nop_Ev)
2888{
2889 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2891 if (IEM_IS_MODRM_REG_MODE(bRm))
2892 {
2893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2894 IEM_MC_BEGIN(0, 0);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 IEM_MC_BEGIN(0, 1);
2901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2904 /* Currently a NOP. */
2905 NOREF(GCPtrEffSrc);
2906 IEM_MC_ADVANCE_RIP();
2907 IEM_MC_END();
2908 }
2909 return VINF_SUCCESS;
2910}
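
/* Note: this covers the long/multi-byte NOP forms (Intel recommends the
   0x0f 0x1f /0 encoding); the effective address is calculated above purely
   to consume the encoding and advance RIP, no memory is accessed. */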
2911
2912
2913/** Opcode 0x0f 0x20. */
2914FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2915{
2916 /* mod is ignored, as are operand size overrides. */
2917 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2918 IEMOP_HLP_MIN_386();
2919 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2920 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2921 else
2922 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2923
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2926 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2927 {
2928 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2929 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2930 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2931 iCrReg |= 8;
2932 }
2933 switch (iCrReg)
2934 {
2935 case 0: case 2: case 3: case 4: case 8:
2936 break;
2937 default:
2938 return IEMOP_RAISE_INVALID_OPCODE();
2939 }
2940 IEMOP_HLP_DONE_DECODING();
2941
2942 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
2943}
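
/* Note: on CPUs that implement the AMD alternate encoding (fMovCr8In32Bit),
   the LOCK prefix ORs 8 into the register number, e.g. turning
   'lock mov eax, cr0' into a CR8 (TPR) access from 32-bit code; the same
   applies to the 0x22 write form below. */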
2944
2945
2946/** Opcode 0x0f 0x21. */
2947FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2948{
2949 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2950 IEMOP_HLP_MIN_386();
2951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2953 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2954 return IEMOP_RAISE_INVALID_OPCODE();
2955 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2956 IEM_GET_MODRM_RM(pVCpu, bRm),
2957 IEM_GET_MODRM_REG_8(bRm));
2958}
2959
2960
2961/** Opcode 0x0f 0x22. */
2962FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2963{
2964 /* mod is ignored, as are operand size overrides. */
2965 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2966 IEMOP_HLP_MIN_386();
2967 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2968 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2969 else
2970 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2971
2972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2973 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2974 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2975 {
2976 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2977 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2978 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2979 iCrReg |= 8;
2980 }
2981 switch (iCrReg)
2982 {
2983 case 0: case 2: case 3: case 4: case 8:
2984 break;
2985 default:
2986 return IEMOP_RAISE_INVALID_OPCODE();
2987 }
2988 IEMOP_HLP_DONE_DECODING();
2989
2990 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
2991}
2992
2993
2994/** Opcode 0x0f 0x23. */
2995FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2996{
2997 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2998 IEMOP_HLP_MIN_386();
2999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3001 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3002 return IEMOP_RAISE_INVALID_OPCODE();
3003 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3004 IEM_GET_MODRM_REG_8(bRm),
3005 IEM_GET_MODRM_RM(pVCpu, bRm));
3006}
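
/* Note: both debug register moves reject REX.R with \#UD since DR8..DR15
   do not exist; unlike CR8 there is no alternate encoding for high debug
   registers. */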
3007
3008
3009/** Opcode 0x0f 0x24. */
3010FNIEMOP_DEF(iemOp_mov_Rd_Td)
3011{
3012 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3013 IEMOP_HLP_MIN_386();
3014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3016 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3017 return IEMOP_RAISE_INVALID_OPCODE();
3018 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3019 IEM_GET_MODRM_RM(pVCpu, bRm),
3020 IEM_GET_MODRM_REG_8(bRm));
3021}
3022
3023
3024/** Opcode 0x0f 0x26. */
3025FNIEMOP_DEF(iemOp_mov_Td_Rd)
3026{
3027 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3028 IEMOP_HLP_MIN_386();
3029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3032 return IEMOP_RAISE_INVALID_OPCODE();
3033 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3034 IEM_GET_MODRM_REG_8(bRm),
3035 IEM_GET_MODRM_RM(pVCpu, bRm));
3036}
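
/* Note: mov to/from the test registers (TR3..TR7) is 386/486 only, which
   is why the two handlers above raise \#UD whenever the emulated target
   CPU is a Pentium or later. */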
3037
3038
3039/**
3040 * @opcode 0x28
3041 * @oppfx none
3042 * @opcpuid sse
3043 * @opgroup og_sse_simdfp_datamove
3044 * @opxcpttype 1
3045 * @optest op1=1 op2=2 -> op1=2
3046 * @optest op1=0 op2=-42 -> op1=-42
3047 */
3048FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3049{
3050 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3052 if (IEM_IS_MODRM_REG_MODE(bRm))
3053 {
3054 /*
3055 * Register, register.
3056 */
3057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3058 IEM_MC_BEGIN(0, 0);
3059 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3060 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3061 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3062 IEM_GET_MODRM_RM(pVCpu, bRm));
3063 IEM_MC_ADVANCE_RIP();
3064 IEM_MC_END();
3065 }
3066 else
3067 {
3068 /*
3069 * Register, memory.
3070 */
3071 IEM_MC_BEGIN(0, 2);
3072 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3074
3075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3078 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3079
3080 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3081 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3082
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 return VINF_SUCCESS;
3087}
3088
3089/**
3090 * @opcode 0x28
3091 * @oppfx 66
3092 * @opcpuid sse2
3093 * @opgroup og_sse2_pcksclr_datamove
3094 * @opxcpttype 1
3095 * @optest op1=1 op2=2 -> op1=2
3096 * @optest op1=0 op2=-42 -> op1=-42
3097 */
3098FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3099{
3100 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3102 if (IEM_IS_MODRM_REG_MODE(bRm))
3103 {
3104 /*
3105 * Register, register.
3106 */
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_BEGIN(0, 0);
3109 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3110 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3111 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3112 IEM_GET_MODRM_RM(pVCpu, bRm));
3113 IEM_MC_ADVANCE_RIP();
3114 IEM_MC_END();
3115 }
3116 else
3117 {
3118 /*
3119 * Register, memory.
3120 */
3121 IEM_MC_BEGIN(0, 2);
3122 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3124
3125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3127 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3128 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3129
3130 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3131 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3132
3133 IEM_MC_ADVANCE_RIP();
3134 IEM_MC_END();
3135 }
3136 return VINF_SUCCESS;
3137}
3138
3139/* Opcode 0xf3 0x0f 0x28 - invalid */
3140/* Opcode 0xf2 0x0f 0x28 - invalid */
3141
3142/**
3143 * @opcode 0x29
3144 * @oppfx none
3145 * @opcpuid sse
3146 * @opgroup og_sse_simdfp_datamove
3147 * @opxcpttype 1
3148 * @optest op1=1 op2=2 -> op1=2
3149 * @optest op1=0 op2=-42 -> op1=-42
3150 */
3151FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3152{
3153 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3155 if (IEM_IS_MODRM_REG_MODE(bRm))
3156 {
3157 /*
3158 * Register, register.
3159 */
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 IEM_MC_BEGIN(0, 0);
3162 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3163 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3164 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3165 IEM_GET_MODRM_REG(pVCpu, bRm));
3166 IEM_MC_ADVANCE_RIP();
3167 IEM_MC_END();
3168 }
3169 else
3170 {
3171 /*
3172 * Memory, register.
3173 */
3174 IEM_MC_BEGIN(0, 2);
3175 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3177
3178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3181 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3182
3183 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3184 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3185
3186 IEM_MC_ADVANCE_RIP();
3187 IEM_MC_END();
3188 }
3189 return VINF_SUCCESS;
3190}
3191
3192/**
3193 * @opcode 0x29
3194 * @oppfx 66
3195 * @opcpuid sse2
3196 * @opgroup og_sse2_pcksclr_datamove
3197 * @opxcpttype 1
3198 * @optest op1=1 op2=2 -> op1=2
3199 * @optest op1=0 op2=-42 -> op1=-42
3200 */
3201FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3202{
3203 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3205 if (IEM_IS_MODRM_REG_MODE(bRm))
3206 {
3207 /*
3208 * Register, register.
3209 */
3210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3211 IEM_MC_BEGIN(0, 0);
3212 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3213 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3214 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3215 IEM_GET_MODRM_REG(pVCpu, bRm));
3216 IEM_MC_ADVANCE_RIP();
3217 IEM_MC_END();
3218 }
3219 else
3220 {
3221 /*
3222 * Memory, register.
3223 */
3224 IEM_MC_BEGIN(0, 2);
3225 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3231 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3232
3233 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3234 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3235
3236 IEM_MC_ADVANCE_RIP();
3237 IEM_MC_END();
3238 }
3239 return VINF_SUCCESS;
3240}
3241
3242/* Opcode 0xf3 0x0f 0x29 - invalid */
3243/* Opcode 0xf2 0x0f 0x29 - invalid */
3244
3245
3246/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3247FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3248/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3249FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3250/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3251FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
3252/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3253FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3254
3255
3256/**
3257 * @opcode 0x2b
3258 * @opcodesub !11 mr/reg
3259 * @oppfx none
3260 * @opcpuid sse
3261 * @opgroup og_sse1_cachect
3262 * @opxcpttype 1
3263 * @optest op1=1 op2=2 -> op1=2
3264 * @optest op1=0 op2=-42 -> op1=-42
3265 */
3266FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3267{
3268 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3270 if (IEM_IS_MODRM_MEM_MODE(bRm))
3271 {
3272 /*
3273 * Memory, register.
3274 */
3275 IEM_MC_BEGIN(0, 2);
3276 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3278
3279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3282 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3283
3284 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3285 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3286
3287 IEM_MC_ADVANCE_RIP();
3288 IEM_MC_END();
3289 }
3290 /* The register, register encoding is invalid. */
3291 else
3292 return IEMOP_RAISE_INVALID_OPCODE();
3293 return VINF_SUCCESS;
3294}
3295
3296/**
3297 * @opcode 0x2b
3298 * @opcodesub !11 mr/reg
3299 * @oppfx 0x66
3300 * @opcpuid sse2
3301 * @opgroup og_sse2_cachect
3302 * @opxcpttype 1
3303 * @optest op1=1 op2=2 -> op1=2
3304 * @optest op1=0 op2=-42 -> op1=-42
3305 */
3306FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3307{
3308 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3310 if (IEM_IS_MODRM_MEM_MODE(bRm))
3311 {
3312 /*
3313 * Memory, register.
3314 */
3315 IEM_MC_BEGIN(0, 2);
3316 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3318
3319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3323
3324 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3325 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3326
3327 IEM_MC_ADVANCE_RIP();
3328 IEM_MC_END();
3329 }
3330 /* The register, register encoding is invalid. */
3331 else
3332 return IEMOP_RAISE_INVALID_OPCODE();
3333 return VINF_SUCCESS;
3334}
3335/* Opcode 0xf3 0x0f 0x2b - invalid */
3336/* Opcode 0xf2 0x0f 0x2b - invalid */
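
/* Note: the movnt* instructions merely hint that the data should bypass the
   caches; since that is not architecturally visible, emulating them as
   ordinary aligned 128-bit stores is correct. The register forms are
   reserved and take the \#UD path above. */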
3337
3338
3339/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3340FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3341/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3342FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3343/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3344FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
3345/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
3346FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
3347
3348/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
3349FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
3350/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
3351FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
3352/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
3353FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
3354/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
3355FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
3356
3357/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
3358FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
3359/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
3360FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
3361/* Opcode 0xf3 0x0f 0x2e - invalid */
3362/* Opcode 0xf2 0x0f 0x2e - invalid */
3363
3364/** Opcode 0x0f 0x2f - comiss Vss, Wss */
3365FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
3366/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
3367FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
3368/* Opcode 0xf3 0x0f 0x2f - invalid */
3369/* Opcode 0xf2 0x0f 0x2f - invalid */
3370
3371/** Opcode 0x0f 0x30. */
3372FNIEMOP_DEF(iemOp_wrmsr)
3373{
3374 IEMOP_MNEMONIC(wrmsr, "wrmsr");
3375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3376 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
3377}
3378
3379
3380/** Opcode 0x0f 0x31. */
3381FNIEMOP_DEF(iemOp_rdtsc)
3382{
3383 IEMOP_MNEMONIC(rdtsc, "rdtsc");
3384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3385 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
3386}
3387
3388
3389/** Opcode 0x0f 0x32. */
3390FNIEMOP_DEF(iemOp_rdmsr)
3391{
3392 IEMOP_MNEMONIC(rdmsr, "rdmsr");
3393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3394 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
3395}
3396
3397
3398/** Opcode 0x0f 0x33. */
3399FNIEMOP_DEF(iemOp_rdpmc)
3400{
3401 IEMOP_MNEMONIC(rdpmc, "rdpmc");
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
3404}
3405
3406
3407/** Opcode 0x0f 0x34. */
3408FNIEMOP_DEF(iemOp_sysenter)
3409{
3410 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3412 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
3413}
3414
3415/** Opcode 0x0f 0x35. */
3416FNIEMOP_DEF(iemOp_sysexit)
3417{
3418 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3420 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
3421}
3422
3423/** Opcode 0x0f 0x37. */
3424FNIEMOP_STUB(iemOp_getsec);
3425
3426
3427/** Opcode 0x0f 0x38. */
3428FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
3429{
3430#ifdef IEM_WITH_THREE_0F_38
3431 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3432 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3433#else
3434 IEMOP_BITCH_ABOUT_STUB();
3435 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3436#endif
3437}
3438
3439
3440/** Opcode 0x0f 0x3a. */
3441FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
3442{
3443#ifdef IEM_WITH_THREE_0F_3A
3444 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3445 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3446#else
3447 IEMOP_BITCH_ABOUT_STUB();
3448 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3449#endif
3450}
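
/* Note: assuming the usual IEM table layout, the three-byte opcode tables
   above hold four entries per opcode byte, selected by the mandatory prefix
   recorded in pVCpu->iem.s.idxPrefix (none/0x66/0xf3/0xf2), which is what
   the 'b * 4 + idxPrefix' indexing expresses. */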
3451
3452
3453/**
3454 * Implements a conditional move.
3455 *
3456 * Wish there were an obvious way to do this where we could share and reduce
3457 * code bloat.
3458 *
3459 * @param a_Cnd The conditional "microcode" operation.
3460 */
3461#define CMOV_X(a_Cnd) \
3462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3463 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3464 { \
3465 switch (pVCpu->iem.s.enmEffOpSize) \
3466 { \
3467 case IEMMODE_16BIT: \
3468 IEM_MC_BEGIN(0, 1); \
3469 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3470 a_Cnd { \
3471 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3472 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3473 } IEM_MC_ENDIF(); \
3474 IEM_MC_ADVANCE_RIP(); \
3475 IEM_MC_END(); \
3476 return VINF_SUCCESS; \
3477 \
3478 case IEMMODE_32BIT: \
3479 IEM_MC_BEGIN(0, 1); \
3480 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3481 a_Cnd { \
3482 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3484 } IEM_MC_ELSE() { \
3485 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3486 } IEM_MC_ENDIF(); \
3487 IEM_MC_ADVANCE_RIP(); \
3488 IEM_MC_END(); \
3489 return VINF_SUCCESS; \
3490 \
3491 case IEMMODE_64BIT: \
3492 IEM_MC_BEGIN(0, 1); \
3493 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3494 a_Cnd { \
3495 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3496 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3497 } IEM_MC_ENDIF(); \
3498 IEM_MC_ADVANCE_RIP(); \
3499 IEM_MC_END(); \
3500 return VINF_SUCCESS; \
3501 \
3502 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3503 } \
3504 } \
3505 else \
3506 { \
3507 switch (pVCpu->iem.s.enmEffOpSize) \
3508 { \
3509 case IEMMODE_16BIT: \
3510 IEM_MC_BEGIN(0, 2); \
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3512 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3514 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3515 a_Cnd { \
3516 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3517 } IEM_MC_ENDIF(); \
3518 IEM_MC_ADVANCE_RIP(); \
3519 IEM_MC_END(); \
3520 return VINF_SUCCESS; \
3521 \
3522 case IEMMODE_32BIT: \
3523 IEM_MC_BEGIN(0, 2); \
3524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3525 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3527 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3528 a_Cnd { \
3529 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3530 } IEM_MC_ELSE() { \
3531 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3532 } IEM_MC_ENDIF(); \
3533 IEM_MC_ADVANCE_RIP(); \
3534 IEM_MC_END(); \
3535 return VINF_SUCCESS; \
3536 \
3537 case IEMMODE_64BIT: \
3538 IEM_MC_BEGIN(0, 2); \
3539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3540 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3542 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3543 a_Cnd { \
3544 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3545 } IEM_MC_ENDIF(); \
3546 IEM_MC_ADVANCE_RIP(); \
3547 IEM_MC_END(); \
3548 return VINF_SUCCESS; \
3549 \
3550 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3551 } \
3552 } do {} while (0)
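
/* Note: the IEM_MC_ELSE() branches in the 32-bit cases are deliberate: in
   64-bit mode a CMOVcc with a 32-bit operand always writes and zero-extends
   the destination register, even when the condition is false, so the high
   dword must be cleared on the not-taken path as well. */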
3553
3554
3555
3556/** Opcode 0x0f 0x40. */
3557FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3558{
3559 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3560 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3561}
3562
3563
3564/** Opcode 0x0f 0x41. */
3565FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3566{
3567 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3568 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3569}
3570
3571
3572/** Opcode 0x0f 0x42. */
3573FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3574{
3575 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3576 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3577}
3578
3579
3580/** Opcode 0x0f 0x43. */
3581FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3582{
3583 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3584 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3585}
3586
3587
3588/** Opcode 0x0f 0x44. */
3589FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3590{
3591 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3592 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3593}
3594
3595
3596/** Opcode 0x0f 0x45. */
3597FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3598{
3599 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3600 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3601}
3602
3603
3604/** Opcode 0x0f 0x46. */
3605FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3606{
3607 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3608 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3609}
3610
3611
3612/** Opcode 0x0f 0x47. */
3613FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3614{
3615 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3616 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3617}
3618
3619
3620/** Opcode 0x0f 0x48. */
3621FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3622{
3623 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3624 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3625}
3626
3627
3628/** Opcode 0x0f 0x49. */
3629FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3630{
3631 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3632 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3633}
3634
3635
3636/** Opcode 0x0f 0x4a. */
3637FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3638{
3639 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3640 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3641}
3642
3643
3644/** Opcode 0x0f 0x4b. */
3645FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3646{
3647 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3648 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3649}
3650
3651
3652/** Opcode 0x0f 0x4c. */
3653FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3654{
3655 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3656 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3657}
3658
3659
3660/** Opcode 0x0f 0x4d. */
3661FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3662{
3663 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3664 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3665}
3666
3667
3668/** Opcode 0x0f 0x4e. */
3669FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3670{
3671 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3672 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3673}
3674
3675
3676/** Opcode 0x0f 0x4f. */
3677FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3678{
3679 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3680 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3681}
3682
3683#undef CMOV_X
3684
3685/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3686FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3687/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3688FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3689/* Opcode 0xf3 0x0f 0x50 - invalid */
3690/* Opcode 0xf2 0x0f 0x50 - invalid */
3691
3692/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3693FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3694/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3695FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3696/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3697FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3698/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3699FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3700
3701/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3702FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3703/* Opcode 0x66 0x0f 0x52 - invalid */
3704/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3705FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3706/* Opcode 0xf2 0x0f 0x52 - invalid */
3707
3708/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3709FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3710/* Opcode 0x66 0x0f 0x53 - invalid */
3711/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3712FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3713/* Opcode 0xf2 0x0f 0x53 - invalid */
3714
3715
3716/** Opcode 0x0f 0x54 - andps Vps, Wps */
3717FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3718{
3719 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3720 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3721}
3722
3723
3724/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3725FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3726{
3727 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3728 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3729}
3730
3731
3732/* Opcode 0xf3 0x0f 0x54 - invalid */
3733/* Opcode 0xf2 0x0f 0x54 - invalid */
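
/* Note: the packed single/double bitwise instructions reuse the integer
   pand/pandn/por/pxor workers (likewise 0x55..0x57 below) since bitwise
   logic is indifferent to the lane type; only the decoding differs. */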
3734
3735
3736/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3737FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3738{
3739 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3740 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3741}
3742
3743
3744/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3745FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3746{
3747 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3748 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3749}
3750
3751
3752/* Opcode 0xf3 0x0f 0x55 - invalid */
3753/* Opcode 0xf2 0x0f 0x55 - invalid */
3754
3755
3756/** Opcode 0x0f 0x56 - orps Vps, Wps */
3757FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3758{
3759 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3760 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3761}
3762
3763
3764/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3765FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3766{
3767 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3768 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3769}
3770
3771
3772/* Opcode 0xf3 0x0f 0x56 - invalid */
3773/* Opcode 0xf2 0x0f 0x56 - invalid */
3774
3775
3776/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3777FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3778{
3779 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3780 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3781}
3782
3783
3784/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3785FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3786{
3787 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3788 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3789}
3790
3791
3792/* Opcode 0xf3 0x0f 0x57 - invalid */
3793/* Opcode 0xf2 0x0f 0x57 - invalid */
3794
3795/** Opcode 0x0f 0x58 - addps Vps, Wps */
3796FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3797/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3798FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3799/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3800FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3801/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3802FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3803
3804/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3805FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3806/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3807FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3808/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3809FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3810/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3811FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3812
3813/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3814FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3815/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3816FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3817/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3818FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3819/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3820FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3821
3822/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3823FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3824/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3825FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3826/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3827FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3828/* Opcode 0xf2 0x0f 0x5b - invalid */
3829
3830/** Opcode 0x0f 0x5c - subps Vps, Wps */
3831FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3832/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3833FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3834/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3835FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3836/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3837FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3838
3839/** Opcode 0x0f 0x5d - minps Vps, Wps */
3840FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3841/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3842FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3843/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3844FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3845/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3846FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3847
3848/** Opcode 0x0f 0x5e - divps Vps, Wps */
3849FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3850/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3851FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3852/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3853FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3854/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3855FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3856
3857/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3858FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3859/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3860FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3861/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3862FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3863/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3864FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3865
3866
3867/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3868FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3869{
3870 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3871 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
3872}
3873
3874
3875/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3876FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3877{
3878 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3879 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
3880}
3881
3882
3883/* Opcode 0xf3 0x0f 0x60 - invalid */
3884
3885
3886/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3887FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3888{
3889    /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
3890 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3891 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
3892}
3893
3894
3895/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3896FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3897{
3898 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3899 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
3900}
3901
3902
3903/* Opcode 0xf3 0x0f 0x61 - invalid */
3904
3905
3906/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3907FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3908{
3909 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3910 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
3911}
3912
3913
3914/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3915FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3916{
3917 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3918 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
3919}
3920
3921
3922/* Opcode 0xf3 0x0f 0x62 - invalid */
3923
3924
3925
3926/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3927FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
3928{
3929    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3930 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
3931}
3932
3933
3934/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3935FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
3936{
3937 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3938 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
3939}
3940
3941
3942/* Opcode 0xf3 0x0f 0x63 - invalid */
3943
3944
3945/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3946FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
3947{
3948 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3949 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
3950}
3951
3952
3953/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3954FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
3955{
3956 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3957 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
3958}
3959
3960
3961/* Opcode 0xf3 0x0f 0x64 - invalid */
3962
3963
3964/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3965FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
3966{
3967 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3968 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
3969}
3970
3971
3972/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3973FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
3974{
3975 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3976 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
3977}
3978
3979
3980/* Opcode 0xf3 0x0f 0x65 - invalid */
3981
3982
3983/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3984FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
3985{
3986 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3987 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
3988}
3989
3990
3991/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3992FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
3993{
3994 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3995 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
3996}
3997
3998
3999/* Opcode 0xf3 0x0f 0x66 - invalid */
4000
4001
4002/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
4003FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
4004{
4005    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4006 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
4007}
4008
4009
4010/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
4011FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
4012{
4013 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4014 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
4015}
4016
4017
4018/* Opcode 0xf3 0x0f 0x67 - invalid */
4019
4020
4021/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
4022 * @note Intel and AMD both use Qd for the second parameter; however, they
4023 *       both list it as an mmX/mem64 operand and Intel describes it as being
4024 * loaded as a qword, so it should be Qq, shouldn't it? */
4025FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
4026{
4027 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4028 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
4029}
4030
4031
4032/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
4033FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
4034{
4035 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4036 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
4037}
4038
4039
4040/* Opcode 0xf3 0x0f 0x68 - invalid */
4041
4042
4043/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
4044 * @note Intel and AMD both use Qd for the second parameter; however, they
4045 *       both list it as an mmX/mem64 operand and Intel describes it as being
4046 * loaded as a qword, so it should be Qq, shouldn't it? */
4047FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
4048{
4049 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4050 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
4051}
4052
4053
4054/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
4055FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
4056{
4057 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4058 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
4060}
4061
4062
4063/* Opcode 0xf3 0x0f 0x69 - invalid */
4064
4065
4066/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
4067 * @note Intel and AMD both use Qd for the second parameter; however, they
4068 *       both list it as an mmX/mem64 operand and Intel describes it as being
4069 * loaded as a qword, so it should be Qq, shouldn't it? */
4070FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
4071{
4072 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4073 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
4074}
4075
4076
4077/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
4078FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
4079{
4080 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4081 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
4082}
4083
4084
4085/* Opcode 0xf3 0x0f 0x6a - invalid */
4086
4087
4088/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
4089FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
4090{
4091 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4092 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
4093}
4094
4095
4096/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
4097FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
4098{
4099 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4100 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
4101}
4102
4103
4104/* Opcode 0xf3 0x0f 0x6b - invalid */
4105
4106
4107/* Opcode 0x0f 0x6c - invalid */
4108
4109
4110/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
4111FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
4112{
4113 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4114 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
4115}
4116
4117
4118/* Opcode 0xf3 0x0f 0x6c - invalid */
4119/* Opcode 0xf2 0x0f 0x6c - invalid */
4120
4121
4122/* Opcode 0x0f 0x6d - invalid */
4123
4124
4125/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
4126FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
4127{
4128 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4129 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
4130}
4131
4132
4133/* Opcode 0xf3 0x0f 0x6d - invalid */
4134
4135
4136FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
4137{
4138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4139 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4140 {
4141 /**
4142 * @opcode 0x6e
4143 * @opcodesub rex.w=1
4144 * @oppfx none
4145 * @opcpuid mmx
4146 * @opgroup og_mmx_datamove
4147 * @opxcpttype 5
4148 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4149 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4150 */
4151 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4152 if (IEM_IS_MODRM_REG_MODE(bRm))
4153 {
4154 /* MMX, greg64 */
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156 IEM_MC_BEGIN(0, 1);
4157 IEM_MC_LOCAL(uint64_t, u64Tmp);
4158
4159 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4160 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4161
4162 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4163 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4164 IEM_MC_FPU_TO_MMX_MODE();
4165
4166 IEM_MC_ADVANCE_RIP();
4167 IEM_MC_END();
4168 }
4169 else
4170 {
4171 /* MMX, [mem64] */
4172 IEM_MC_BEGIN(0, 2);
4173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4174 IEM_MC_LOCAL(uint64_t, u64Tmp);
4175
4176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4179 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4180
4181 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4182 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4183 IEM_MC_FPU_TO_MMX_MODE();
4184
4185 IEM_MC_ADVANCE_RIP();
4186 IEM_MC_END();
4187 }
4188 }
4189 else
4190 {
4191 /**
4192 * @opdone
4193 * @opcode 0x6e
4194 * @opcodesub rex.w=0
4195 * @oppfx none
4196 * @opcpuid mmx
4197 * @opgroup og_mmx_datamove
4198 * @opxcpttype 5
4199 * @opfunction iemOp_movd_q_Pd_Ey
4200 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4201 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4202 */
4203 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4204 if (IEM_IS_MODRM_REG_MODE(bRm))
4205 {
4206            /* MMX, greg32 */
4207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4208 IEM_MC_BEGIN(0, 1);
4209 IEM_MC_LOCAL(uint64_t, u64Tmp);
4210
4211 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4212 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4213
4214 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4215 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4216 IEM_MC_FPU_TO_MMX_MODE();
4217
4218 IEM_MC_ADVANCE_RIP();
4219 IEM_MC_END();
4220 }
4221 else
4222 {
4223            /* MMX, [mem32] */
4224 IEM_MC_BEGIN(0, 2);
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4226 IEM_MC_LOCAL(uint32_t, u32Tmp);
4227
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4231 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4232
4233 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4234 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4235 IEM_MC_FPU_TO_MMX_MODE();
4236
4237 IEM_MC_ADVANCE_RIP();
4238 IEM_MC_END();
4239 }
4240 }
4241 return VINF_SUCCESS;
4242}
4243
4244FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4245{
4246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4247 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4248 {
4249 /**
4250 * @opcode 0x6e
4251 * @opcodesub rex.w=1
4252 * @oppfx 0x66
4253 * @opcpuid sse2
4254 * @opgroup og_sse2_simdint_datamove
4255 * @opxcpttype 5
4256 * @optest 64-bit / op1=1 op2=2 -> op1=2
4257 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4258 */
4259 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4260 if (IEM_IS_MODRM_REG_MODE(bRm))
4261 {
4262 /* XMM, greg64 */
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264 IEM_MC_BEGIN(0, 1);
4265 IEM_MC_LOCAL(uint64_t, u64Tmp);
4266
4267 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4268 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4269
4270 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4271 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4272
4273 IEM_MC_ADVANCE_RIP();
4274 IEM_MC_END();
4275 }
4276 else
4277 {
4278 /* XMM, [mem64] */
4279 IEM_MC_BEGIN(0, 2);
4280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4281 IEM_MC_LOCAL(uint64_t, u64Tmp);
4282
4283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4285 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4286 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4287
4288 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4289 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4290
4291 IEM_MC_ADVANCE_RIP();
4292 IEM_MC_END();
4293 }
4294 }
4295 else
4296 {
4297 /**
4298 * @opdone
4299 * @opcode 0x6e
4300 * @opcodesub rex.w=0
4301 * @oppfx 0x66
4302 * @opcpuid sse2
4303 * @opgroup og_sse2_simdint_datamove
4304 * @opxcpttype 5
4305 * @opfunction iemOp_movd_q_Vy_Ey
4306 * @optest op1=1 op2=2 -> op1=2
4307 * @optest op1=0 op2=-42 -> op1=-42
4308 */
4309 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4310 if (IEM_IS_MODRM_REG_MODE(bRm))
4311 {
4312 /* XMM, greg32 */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 1);
4315 IEM_MC_LOCAL(uint32_t, u32Tmp);
4316
4317 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4319
4320 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4321 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4322
4323 IEM_MC_ADVANCE_RIP();
4324 IEM_MC_END();
4325 }
4326 else
4327 {
4328 /* XMM, [mem32] */
4329 IEM_MC_BEGIN(0, 2);
4330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4331 IEM_MC_LOCAL(uint32_t, u32Tmp);
4332
4333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4335 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4337
4338 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4339 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4340
4341 IEM_MC_ADVANCE_RIP();
4342 IEM_MC_END();
4343 }
4344 }
4345 return VINF_SUCCESS;
4346}
4347
4348/* Opcode 0xf3 0x0f 0x6e - invalid */
4349
4350
4351/**
4352 * @opcode 0x6f
4353 * @oppfx none
4354 * @opcpuid mmx
4355 * @opgroup og_mmx_datamove
4356 * @opxcpttype 5
4357 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4358 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4359 */
4360FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4361{
4362    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4364 if (IEM_IS_MODRM_REG_MODE(bRm))
4365 {
4366 /*
4367 * Register, register.
4368 */
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370 IEM_MC_BEGIN(0, 1);
4371 IEM_MC_LOCAL(uint64_t, u64Tmp);
4372
4373 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4374 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4375
4376 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4377 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4378 IEM_MC_FPU_TO_MMX_MODE();
4379
4380 IEM_MC_ADVANCE_RIP();
4381 IEM_MC_END();
4382 }
4383 else
4384 {
4385 /*
4386 * Register, memory.
4387 */
4388 IEM_MC_BEGIN(0, 2);
4389 IEM_MC_LOCAL(uint64_t, u64Tmp);
4390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4391
4392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4396
4397 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4398 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4399 IEM_MC_FPU_TO_MMX_MODE();
4400
4401 IEM_MC_ADVANCE_RIP();
4402 IEM_MC_END();
4403 }
4404 return VINF_SUCCESS;
4405}
4406
4407/**
4408 * @opcode 0x6f
4409 * @oppfx 0x66
4410 * @opcpuid sse2
4411 * @opgroup og_sse2_simdint_datamove
4412 * @opxcpttype 1
4413 * @optest op1=1 op2=2 -> op1=2
4414 * @optest op1=0 op2=-42 -> op1=-42
4415 */
4416FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4417{
4418 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4420 if (IEM_IS_MODRM_REG_MODE(bRm))
4421 {
4422 /*
4423 * Register, register.
4424 */
4425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4426 IEM_MC_BEGIN(0, 0);
4427
4428 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4430
4431 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4432 IEM_GET_MODRM_RM(pVCpu, bRm));
4433 IEM_MC_ADVANCE_RIP();
4434 IEM_MC_END();
4435 }
4436 else
4437 {
4438 /*
4439 * Register, memory.
4440 */
4441 IEM_MC_BEGIN(0, 2);
4442 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4444
4445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4447 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4449
4450 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4451 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4452
4453 IEM_MC_ADVANCE_RIP();
4454 IEM_MC_END();
4455 }
4456 return VINF_SUCCESS;
4457}
4458
4459/**
4460 * @opcode 0x6f
4461 * @oppfx 0xf3
4462 * @opcpuid sse2
4463 * @opgroup og_sse2_simdint_datamove
4464 * @opxcpttype 4UA
4465 * @optest op1=1 op2=2 -> op1=2
4466 * @optest op1=0 op2=-42 -> op1=-42
4467 */
4468FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4469{
4470 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4472 if (IEM_IS_MODRM_REG_MODE(bRm))
4473 {
4474 /*
4475 * Register, register.
4476 */
4477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4481 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4482 IEM_GET_MODRM_RM(pVCpu, bRm));
4483 IEM_MC_ADVANCE_RIP();
4484 IEM_MC_END();
4485 }
4486 else
4487 {
4488 /*
4489 * Register, memory.
4490 */
4491 IEM_MC_BEGIN(0, 2);
4492 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4494
4495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4499 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4500 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4501
4502 IEM_MC_ADVANCE_RIP();
4503 IEM_MC_END();
4504 }
4505 return VINF_SUCCESS;
4506}
4507
4508
4509/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
4510FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
4511{
4512 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4514 if (IEM_IS_MODRM_REG_MODE(bRm))
4515 {
4516 /*
4517 * Register, register.
4518 */
4519 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4521
4522 IEM_MC_BEGIN(3, 0);
4523 IEM_MC_ARG(uint64_t *, pDst, 0);
4524 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4525 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4526 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4527 IEM_MC_PREPARE_FPU_USAGE();
4528 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4529 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
4530 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4531 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4532 IEM_MC_FPU_TO_MMX_MODE();
4533 IEM_MC_ADVANCE_RIP();
4534 IEM_MC_END();
4535 }
4536 else
4537 {
4538 /*
4539 * Register, memory.
4540 */
4541 IEM_MC_BEGIN(3, 2);
4542 IEM_MC_ARG(uint64_t *, pDst, 0);
4543 IEM_MC_LOCAL(uint64_t, uSrc);
4544 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4546
4547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
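        /* The shuffle immediate follows the ModR/M displacement bytes, so it
           can only be fetched after the effective address has been decoded. */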
4548 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4549 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4551 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4552
4553 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4554 IEM_MC_PREPARE_FPU_USAGE();
4555 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4556 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4557 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4558 IEM_MC_FPU_TO_MMX_MODE();
4559
4560 IEM_MC_ADVANCE_RIP();
4561 IEM_MC_END();
4562 }
4563 return VINF_SUCCESS;
4564}
4565
4566
4567/**
4568 * Common worker for SSE2 instructions on the forms:
4569 * pshufd xmm1, xmm2/mem128, imm8
4570 * pshufhw xmm1, xmm2/mem128, imm8
4571 * pshuflw xmm1, xmm2/mem128, imm8
4572 *
4573 * Proper alignment of the 128-bit operand is enforced.
4574 * Exceptions type 4. SSE2 cpuid checks.
4575 */
4576FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4577{
4578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4579 if (IEM_IS_MODRM_REG_MODE(bRm))
4580 {
4581 /*
4582 * Register, register.
4583 */
4584 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586
4587 IEM_MC_BEGIN(3, 0);
4588 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4589 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4590 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4591 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4592 IEM_MC_PREPARE_SSE_USAGE();
4593 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4594 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4595 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4596 IEM_MC_ADVANCE_RIP();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /*
4602 * Register, memory.
4603 */
4604 IEM_MC_BEGIN(3, 2);
4605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4606 IEM_MC_LOCAL(RTUINT128U, uSrc);
4607 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4609
4610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4611 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4612 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4615
4616 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4617 IEM_MC_PREPARE_SSE_USAGE();
4618 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4619 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4620
4621 IEM_MC_ADVANCE_RIP();
4622 IEM_MC_END();
4623 }
4624 return VINF_SUCCESS;
4625}
4626
4627
4628/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4629FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4630{
4631 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4632 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
4633}
4634
4635
4636/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4637FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4638{
4639 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4640 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
4641}
4642
4643
4644/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4645FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4646{
4647 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4648 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
4649}
4650
4651
4652/**
4653 * Common worker for MMX instructions of the form:
4654 * psrlw mm, imm8
4655 * psraw mm, imm8
4656 * psllw mm, imm8
4657 * psrld mm, imm8
4658 * psrad mm, imm8
4659 * pslld mm, imm8
4660 * psrlq mm, imm8
4661 * psllq mm, imm8
4662 *
4663 */
4664FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
4665{
4666 if (IEM_IS_MODRM_REG_MODE(bRm))
4667 {
4668 /*
4669 * Register, immediate.
4670 */
4671 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4673
4674 IEM_MC_BEGIN(2, 0);
4675 IEM_MC_ARG(uint64_t *, pDst, 0);
4676 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4677 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4678 IEM_MC_PREPARE_FPU_USAGE();
4679 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4680 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
4681 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4682 IEM_MC_FPU_TO_MMX_MODE();
4683 IEM_MC_ADVANCE_RIP();
4684 IEM_MC_END();
4685 }
4686 else
4687 {
4688 /*
4689 * Register, memory not supported.
4690 */
4691        /* Unreachable: the group 12/13/14 dispatchers route memory forms (mod != 3) to iemOp_InvalidWithRMNeedImm8. */
4692 }
4693 return VINF_SUCCESS;
4694}
4695
4696
4697/**
4698 * Common worker for SSE2 instructions of the form:
4699 * psrlw xmm, imm8
4700 * psraw xmm, imm8
4701 * psllw xmm, imm8
4702 * psrld xmm, imm8
4703 * psrad xmm, imm8
4704 * pslld xmm, imm8
4705 * psrlq xmm, imm8
4706 * psllq xmm, imm8
4707 *
4708 */
4709FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
4710{
4711 if (IEM_IS_MODRM_REG_MODE(bRm))
4712 {
4713 /*
4714 * Register, immediate.
4715 */
4716 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4718
4719 IEM_MC_BEGIN(2, 0);
4720 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4721 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4723 IEM_MC_PREPARE_SSE_USAGE();
4724 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4725 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /*
4732         * Register, memory not supported.
4733         */
4734        /* Unreachable: the group 12/13/14 dispatchers route memory forms (mod != 3) to iemOp_InvalidWithRMNeedImm8. */
4735 }
4736 return VINF_SUCCESS;
4737}
4738
4739
4740/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
4741FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
4742{
4743// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4744 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
4745}
4746
4747
4748/** Opcode 0x66 0x0f 0x71 11/2. */
4749FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
4750{
4751// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4752 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
4753}
4754
4755
4756/** Opcode 0x0f 0x71 11/4. */
4757FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
4758{
4759// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4760 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
4761}
4762
4763
4764/** Opcode 0x66 0x0f 0x71 11/4. */
4765FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
4766{
4767// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4768 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
4769}
4770
4771
4772/** Opcode 0x0f 0x71 11/6. */
4773FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
4774{
4775// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4776 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
4777}
4778
4779
4780/** Opcode 0x66 0x0f 0x71 11/6. */
4781FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
4782{
4783// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4784 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
4785}
4786
4787
4788/**
4789 * Group 12 jump table for register variant.
4790 */
4791IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4792{
4793 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4794 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4795 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4796 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4797 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4798 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4799 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4800 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4801};
4802AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
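/*
 * Layout note: each /reg row in the group tables above has four columns
 * selected by pVCpu->iem.s.idxPrefix, i.e. the active SIMD prefix (none,
 * 0x66, 0xf3, 0xf2); the dispatchers below index the tables accordingly.
 */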
4803
4804
4805/** Opcode 0x0f 0x71. */
4806FNIEMOP_DEF(iemOp_Grp12)
4807{
4808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4809 if (IEM_IS_MODRM_REG_MODE(bRm))
4810 /* register, register */
4811 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4812 + pVCpu->iem.s.idxPrefix], bRm);
4813 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4814}
4815
4816
4817/** Opcode 0x0f 0x72 11/2. */
4818FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
4819{
4820// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4821 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
4822}
4823
4824
4825/** Opcode 0x66 0x0f 0x72 11/2. */
4826FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
4827{
4828// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4829 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
4830}
4831
4832
4833/** Opcode 0x0f 0x72 11/4. */
4834FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
4835{
4836// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4837 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
4838}
4839
4840
4841/** Opcode 0x66 0x0f 0x72 11/4. */
4842FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
4843{
4844// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4845 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
4846}
4847
4848
4849/** Opcode 0x0f 0x72 11/6. */
4850FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
4851{
4852// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4853 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
4854}
4855
4856/** Opcode 0x66 0x0f 0x72 11/6. */
4857FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
4858{
4859// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4860 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
4861}
4862
4863
4864/**
4865 * Group 13 jump table for register variant.
4866 */
4867IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4868{
4869 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4870 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4871 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4872 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4873 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4874 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4875 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4876 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4877};
4878AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4879
4880/** Opcode 0x0f 0x72. */
4881FNIEMOP_DEF(iemOp_Grp13)
4882{
4883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4884 if (IEM_IS_MODRM_REG_MODE(bRm))
4885 /* register, register */
4886 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4887 + pVCpu->iem.s.idxPrefix], bRm);
4888 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4889}
4890
4891
4892/** Opcode 0x0f 0x73 11/2. */
4893FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
4894{
4895// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4896 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
4897}
4898
4899
4900/** Opcode 0x66 0x0f 0x73 11/2. */
4901FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
4902{
4903// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4904 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
4905}
4906
4907
4908/** Opcode 0x66 0x0f 0x73 11/3. */
4909FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
4910{
4911// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4912 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
4913}
4914
4915
4916/** Opcode 0x0f 0x73 11/6. */
4917FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
4918{
4919// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4920 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
4921}
4922
4923
4924/** Opcode 0x66 0x0f 0x73 11/6. */
4925FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
4926{
4927// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4928 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
4929}
4930
4931
4932/** Opcode 0x66 0x0f 0x73 11/7. */
4933FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
4934{
4935// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4936 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
4937}
4938
4939/**
4940 * Group 14 jump table for register variant.
4941 */
4942IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4943{
4944 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4945 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4946 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4947 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4948 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4949 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4950 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4951 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4952};
4953AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4954
4955
4956/** Opcode 0x0f 0x73. */
4957FNIEMOP_DEF(iemOp_Grp14)
4958{
4959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4960 if (IEM_IS_MODRM_REG_MODE(bRm))
4961 /* register, register */
4962 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4963 + pVCpu->iem.s.idxPrefix], bRm);
4964 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4965}
4966
4967
4968/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4969FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4970{
4971 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4972 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
4973}
4974
4975
4976/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4977FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4978{
4979 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4980 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
4981}
4982
4983
4984/* Opcode 0xf3 0x0f 0x74 - invalid */
4985/* Opcode 0xf2 0x0f 0x74 - invalid */
4986
4987
4988/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4989FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4990{
4991 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4992 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
4993}
4994
4995
4996/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4997FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4998{
4999 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5000 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
5001}
5002
5003
5004/* Opcode 0xf3 0x0f 0x75 - invalid */
5005/* Opcode 0xf2 0x0f 0x75 - invalid */
5006
5007
5008/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
5009FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
5010{
5011 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5012 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
5013}
5014
5015
5016/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
5017FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
5018{
5019 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5020 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
5021}
5022
5023
5024/* Opcode 0xf3 0x0f 0x76 - invalid */
5025/* Opcode 0xf2 0x0f 0x76 - invalid */
5026
5027
5028/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
5029FNIEMOP_DEF(iemOp_emms)
5030{
5031 IEMOP_MNEMONIC(emms, "emms");
5032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
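    /* EMMS leaves MMX mode by tagging all eight x87 registers as empty
       (FTW=0xffff); IEM_MC_FPU_FROM_MMX_MODE below performs that transition. */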
5033
5034    IEM_MC_BEGIN(0, 0);
5035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
5036 IEM_MC_MAYBE_RAISE_FPU_XCPT();
5037 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5038 IEM_MC_FPU_FROM_MMX_MODE();
5039 IEM_MC_ADVANCE_RIP();
5040 IEM_MC_END();
5041 return VINF_SUCCESS;
5042}
5043
5044/* Opcode 0x66 0x0f 0x77 - invalid */
5045/* Opcode 0xf3 0x0f 0x77 - invalid */
5046/* Opcode 0xf2 0x0f 0x77 - invalid */
5047
5048/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5049#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5050FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
5051{
5052 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
5053 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
5054 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
5055 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
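    /* Per the VMX spec, VMREAD/VMWRITE operands are always 64-bit in long mode
       and 32-bit otherwise; the 0x66 operand-size prefix has no effect here. */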
5056
5057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5058 if (IEM_IS_MODRM_REG_MODE(bRm))
5059 {
5060 /*
5061 * Register, register.
5062 */
5063 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5064 if (enmEffOpSize == IEMMODE_64BIT)
5065 {
5066 IEM_MC_BEGIN(2, 0);
5067 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5068 IEM_MC_ARG(uint64_t, u64Enc, 1);
5069 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5070 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5071 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 IEM_MC_BEGIN(2, 0);
5077 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5078 IEM_MC_ARG(uint32_t, u32Enc, 1);
5079 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5080 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5081 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
5082 IEM_MC_END();
5083 }
5084 }
5085 else
5086 {
5087 /*
5088 * Memory, register.
5089 */
5090 if (enmEffOpSize == IEMMODE_64BIT)
5091 {
5092 IEM_MC_BEGIN(3, 0);
5093 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5094 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5095 IEM_MC_ARG(uint64_t, u64Enc, 2);
5096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5097 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5098 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5099 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5100 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
5101 IEM_MC_END();
5102 }
5103 else
5104 {
5105 IEM_MC_BEGIN(3, 0);
5106 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5107 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5108 IEM_MC_ARG(uint32_t, u32Enc, 2);
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5110 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5111 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5112 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5113 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
5114 IEM_MC_END();
5115 }
5116 }
5117 return VINF_SUCCESS;
5118}
5119#else
5120FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5121#endif
5122
5123/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5124FNIEMOP_STUB(iemOp_AmdGrp17);
5125/* Opcode 0xf3 0x0f 0x78 - invalid */
5126/* Opcode 0xf2 0x0f 0x78 - invalid */
5127
5128/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5129#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5130FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
5131{
5132 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
5133 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
5134 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
5135 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5136
5137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5138 if (IEM_IS_MODRM_REG_MODE(bRm))
5139 {
5140 /*
5141 * Register, register.
5142 */
5143 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5144 if (enmEffOpSize == IEMMODE_64BIT)
5145 {
5146 IEM_MC_BEGIN(2, 0);
5147 IEM_MC_ARG(uint64_t, u64Val, 0);
5148 IEM_MC_ARG(uint64_t, u64Enc, 1);
5149 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5150 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5151 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
5152 IEM_MC_END();
5153 }
5154 else
5155 {
5156 IEM_MC_BEGIN(2, 0);
5157 IEM_MC_ARG(uint32_t, u32Val, 0);
5158 IEM_MC_ARG(uint32_t, u32Enc, 1);
5159 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5160 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5161 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
5162 IEM_MC_END();
5163 }
5164 }
5165 else
5166 {
5167 /*
5168 * Register, memory.
5169 */
5170 if (enmEffOpSize == IEMMODE_64BIT)
5171 {
5172 IEM_MC_BEGIN(3, 0);
5173 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5174 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5175 IEM_MC_ARG(uint64_t, u64Enc, 2);
5176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5177 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5178 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5179 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5180 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
5181 IEM_MC_END();
5182 }
5183 else
5184 {
5185 IEM_MC_BEGIN(3, 0);
5186 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5187 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5188 IEM_MC_ARG(uint32_t, u32Enc, 2);
5189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5190 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5191 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5192 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5193 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
5194 IEM_MC_END();
5195 }
5196 }
5197 return VINF_SUCCESS;
5198}
5199#else
5200FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5201#endif
5202/* Opcode 0x66 0x0f 0x79 - invalid */
5203/* Opcode 0xf3 0x0f 0x79 - invalid */
5204/* Opcode 0xf2 0x0f 0x79 - invalid */
5205
5206/* Opcode 0x0f 0x7a - invalid */
5207/* Opcode 0x66 0x0f 0x7a - invalid */
5208/* Opcode 0xf3 0x0f 0x7a - invalid */
5209/* Opcode 0xf2 0x0f 0x7a - invalid */
5210
5211/* Opcode 0x0f 0x7b - invalid */
5212/* Opcode 0x66 0x0f 0x7b - invalid */
5213/* Opcode 0xf3 0x0f 0x7b - invalid */
5214/* Opcode 0xf2 0x0f 0x7b - invalid */
5215
5216/* Opcode 0x0f 0x7c - invalid */
5217/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5218FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5219/* Opcode 0xf3 0x0f 0x7c - invalid */
5220/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5221FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5222
5223/* Opcode 0x0f 0x7d - invalid */
5224/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5225FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5226/* Opcode 0xf3 0x0f 0x7d - invalid */
5227/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5228FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5229
5230
5231/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5232FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5233{
5234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5236 {
5237 /**
5238 * @opcode 0x7e
5239 * @opcodesub rex.w=1
5240 * @oppfx none
5241 * @opcpuid mmx
5242 * @opgroup og_mmx_datamove
5243 * @opxcpttype 5
5244 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5245 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5246 */
5247 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5248 if (IEM_IS_MODRM_REG_MODE(bRm))
5249 {
5250 /* greg64, MMX */
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252 IEM_MC_BEGIN(0, 1);
5253 IEM_MC_LOCAL(uint64_t, u64Tmp);
5254
5255 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5256 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5257
5258 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5259 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5260 IEM_MC_FPU_TO_MMX_MODE();
5261
5262 IEM_MC_ADVANCE_RIP();
5263 IEM_MC_END();
5264 }
5265 else
5266 {
5267 /* [mem64], MMX */
5268 IEM_MC_BEGIN(0, 2);
5269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5270 IEM_MC_LOCAL(uint64_t, u64Tmp);
5271
5272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5275 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5276
5277 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5278 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5279 IEM_MC_FPU_TO_MMX_MODE();
5280
5281 IEM_MC_ADVANCE_RIP();
5282 IEM_MC_END();
5283 }
5284 }
5285 else
5286 {
5287 /**
5288 * @opdone
5289 * @opcode 0x7e
5290 * @opcodesub rex.w=0
5291 * @oppfx none
5292 * @opcpuid mmx
5293 * @opgroup og_mmx_datamove
5294 * @opxcpttype 5
5295 * @opfunction iemOp_movd_q_Ey_Pd
5296 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5297 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5298 */
5299 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5300 if (IEM_IS_MODRM_REG_MODE(bRm))
5301 {
5302 /* greg32, MMX */
5303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5304 IEM_MC_BEGIN(0, 1);
5305 IEM_MC_LOCAL(uint32_t, u32Tmp);
5306
5307 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5308 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5309
5310 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5311 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5312 IEM_MC_FPU_TO_MMX_MODE();
5313
5314 IEM_MC_ADVANCE_RIP();
5315 IEM_MC_END();
5316 }
5317 else
5318 {
5319 /* [mem32], MMX */
5320 IEM_MC_BEGIN(0, 2);
5321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5322 IEM_MC_LOCAL(uint32_t, u32Tmp);
5323
5324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5326 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5327 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5328
5329 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5330 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5331 IEM_MC_FPU_TO_MMX_MODE();
5332
5333 IEM_MC_ADVANCE_RIP();
5334 IEM_MC_END();
5335 }
5336 }
5337 return VINF_SUCCESS;
5339}
5340
5341
5342FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5343{
5344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5345 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5346 {
5347 /**
5348 * @opcode 0x7e
5349 * @opcodesub rex.w=1
5350 * @oppfx 0x66
5351 * @opcpuid sse2
5352 * @opgroup og_sse2_simdint_datamove
5353 * @opxcpttype 5
5354 * @optest 64-bit / op1=1 op2=2 -> op1=2
5355 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5356 */
5357 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5358 if (IEM_IS_MODRM_REG_MODE(bRm))
5359 {
5360 /* greg64, XMM */
5361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5362 IEM_MC_BEGIN(0, 1);
5363 IEM_MC_LOCAL(uint64_t, u64Tmp);
5364
5365 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5367
5368 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5369 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5370
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 }
5374 else
5375 {
5376 /* [mem64], XMM */
5377 IEM_MC_BEGIN(0, 2);
5378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5379 IEM_MC_LOCAL(uint64_t, u64Tmp);
5380
5381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5385
5386 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5387 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5388
5389 IEM_MC_ADVANCE_RIP();
5390 IEM_MC_END();
5391 }
5392 }
5393 else
5394 {
5395 /**
5396 * @opdone
5397 * @opcode 0x7e
5398 * @opcodesub rex.w=0
5399 * @oppfx 0x66
5400 * @opcpuid sse2
5401 * @opgroup og_sse2_simdint_datamove
5402 * @opxcpttype 5
5403 * @opfunction iemOp_movd_q_Ey_Vy
5404 * @optest op1=1 op2=2 -> op1=2
5405 * @optest op1=0 op2=-42 -> op1=-42
5406 */
5407 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5408 if (IEM_IS_MODRM_REG_MODE(bRm))
5409 {
5410 /* greg32, XMM */
5411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5412 IEM_MC_BEGIN(0, 1);
5413 IEM_MC_LOCAL(uint32_t, u32Tmp);
5414
5415 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5417
5418 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5419 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5420
5421 IEM_MC_ADVANCE_RIP();
5422 IEM_MC_END();
5423 }
5424 else
5425 {
5426 /* [mem32], XMM */
5427 IEM_MC_BEGIN(0, 2);
5428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5429 IEM_MC_LOCAL(uint32_t, u32Tmp);
5430
5431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5435
5436 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5437 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5438
5439 IEM_MC_ADVANCE_RIP();
5440 IEM_MC_END();
5441 }
5442 }
5443 return VINF_SUCCESS;
5445}
5446
5447/**
5448 * @opcode 0x7e
5449 * @oppfx 0xf3
5450 * @opcpuid sse2
5451 * @opgroup og_sse2_pcksclr_datamove
5452 * @opxcpttype none
5453 * @optest op1=1 op2=2 -> op1=2
5454 * @optest op1=0 op2=-42 -> op1=-42
5455 */
5456FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5457{
5458 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5460 if (IEM_IS_MODRM_REG_MODE(bRm))
5461 {
5462 /*
5463 * Register, register.
5464 */
5465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5466 IEM_MC_BEGIN(0, 2);
5467 IEM_MC_LOCAL(uint64_t, uSrc);
5468
5469 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5470 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5471
5472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5473 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5474
5475 IEM_MC_ADVANCE_RIP();
5476 IEM_MC_END();
5477 }
5478 else
5479 {
5480 /*
5481         * Register, memory.
5482 */
5483 IEM_MC_BEGIN(0, 2);
5484 IEM_MC_LOCAL(uint64_t, uSrc);
5485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5486
5487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5489 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5490 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5491
5492 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5493 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5494
5495 IEM_MC_ADVANCE_RIP();
5496 IEM_MC_END();
5497 }
5498 return VINF_SUCCESS;
5499}
5500
5501/* Opcode 0xf2 0x0f 0x7e - invalid */
5502
5503
5504/** Opcode 0x0f 0x7f - movq Qq, Pq */
5505FNIEMOP_DEF(iemOp_movq_Qq_Pq)
5506{
5507 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
5508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5509 if (IEM_IS_MODRM_REG_MODE(bRm))
5510 {
5511 /*
5512 * Register, register.
5513 */
5514 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
5515 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517 IEM_MC_BEGIN(0, 1);
5518 IEM_MC_LOCAL(uint64_t, u64Tmp);
5519 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5520 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5521 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5522 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
5523 IEM_MC_FPU_TO_MMX_MODE();
5524 IEM_MC_ADVANCE_RIP();
5525 IEM_MC_END();
5526 }
5527 else
5528 {
5529 /*
5530         * Memory, register.
5531 */
5532 IEM_MC_BEGIN(0, 2);
5533 IEM_MC_LOCAL(uint64_t, u64Tmp);
5534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5535
5536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5538 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5539 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5540
5541 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5542 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5543 IEM_MC_FPU_TO_MMX_MODE();
5544
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 }
5548 return VINF_SUCCESS;
5549}
5550
5551/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
5552FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
5553{
5554 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5556 if (IEM_IS_MODRM_REG_MODE(bRm))
5557 {
5558 /*
5559 * Register, register.
5560 */
5561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5562 IEM_MC_BEGIN(0, 0);
5563 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5565 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5566 IEM_GET_MODRM_REG(pVCpu, bRm));
5567 IEM_MC_ADVANCE_RIP();
5568 IEM_MC_END();
5569 }
5570 else
5571 {
5572 /*
5573 * Memory, register.
5574 */
5575 IEM_MC_BEGIN(0, 2);
5576 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5578
5579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5582 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5583
5584 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5585 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5586
5587 IEM_MC_ADVANCE_RIP();
5588 IEM_MC_END();
5589 }
5590 return VINF_SUCCESS;
5591}
5592
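/*
 * Note: movdqa and movdqu differ only in the memory form: movdqa uses the
 *       alignment-checking store (IEM_MC_STORE_MEM_U128_ALIGN_SSE), which
 *       raises #GP(0) for a misaligned 16-byte access, while movdqu below
 *       uses the plain IEM_MC_STORE_MEM_U128 and accepts any alignment.
 */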
5593/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
5594FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
5595{
5596 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5598 if (IEM_IS_MODRM_REG_MODE(bRm))
5599 {
5600 /*
5601 * Register, register.
5602 */
5603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5604 IEM_MC_BEGIN(0, 0);
5605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5607 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5608 IEM_GET_MODRM_REG(pVCpu, bRm));
5609 IEM_MC_ADVANCE_RIP();
5610 IEM_MC_END();
5611 }
5612 else
5613 {
5614 /*
5615 * Memory, register.
5616 */
5617 IEM_MC_BEGIN(0, 2);
5618 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5620
5621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5623 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5624 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5625
5626 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5627 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5628
5629 IEM_MC_ADVANCE_RIP();
5630 IEM_MC_END();
5631 }
5632 return VINF_SUCCESS;
5633}
5634
5635/* Opcode 0xf2 0x0f 0x7f - invalid */
5636
5637
5638
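/*
 * The 0x0f 0x80..0x8f block below implements the long-form Jcc instructions.
 * Each follows the same pattern: with a 16-bit effective operand size a rel16
 * displacement is fetched and IEM_MC_REL_JMP_S16 truncates the resulting IP
 * to 16 bits; otherwise a rel32 displacement is fetched and sign-extended.
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes 64-bit the default operand size in
 * long mode, so the rel32 form there covers the full RIP. As a concrete
 * example, '0F 80 FA FF FF FF' (jo rel32 with rel32 = -6) is a 6-byte
 * instruction that branches to itself whenever OF is set.
 */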
5639/** Opcode 0x0f 0x80. */
5640FNIEMOP_DEF(iemOp_jo_Jv)
5641{
5642 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
5643 IEMOP_HLP_MIN_386();
5644 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5645 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5646 {
5647 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5649
5650 IEM_MC_BEGIN(0, 0);
5651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5652 IEM_MC_REL_JMP_S16(i16Imm);
5653 } IEM_MC_ELSE() {
5654 IEM_MC_ADVANCE_RIP();
5655 } IEM_MC_ENDIF();
5656 IEM_MC_END();
5657 }
5658 else
5659 {
5660 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5662
5663 IEM_MC_BEGIN(0, 0);
5664 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5665 IEM_MC_REL_JMP_S32(i32Imm);
5666 } IEM_MC_ELSE() {
5667 IEM_MC_ADVANCE_RIP();
5668 } IEM_MC_ENDIF();
5669 IEM_MC_END();
5670 }
5671 return VINF_SUCCESS;
5672}
5673
5674
5675/** Opcode 0x0f 0x81. */
5676FNIEMOP_DEF(iemOp_jno_Jv)
5677{
5678 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5679 IEMOP_HLP_MIN_386();
5680 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5681 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5682 {
5683 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5685
5686 IEM_MC_BEGIN(0, 0);
5687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5688 IEM_MC_ADVANCE_RIP();
5689 } IEM_MC_ELSE() {
5690 IEM_MC_REL_JMP_S16(i16Imm);
5691 } IEM_MC_ENDIF();
5692 IEM_MC_END();
5693 }
5694 else
5695 {
5696 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698
5699 IEM_MC_BEGIN(0, 0);
5700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5701 IEM_MC_ADVANCE_RIP();
5702 } IEM_MC_ELSE() {
5703 IEM_MC_REL_JMP_S32(i32Imm);
5704 } IEM_MC_ENDIF();
5705 IEM_MC_END();
5706 }
5707 return VINF_SUCCESS;
5708}
5709
5710
5711/** Opcode 0x0f 0x82. */
5712FNIEMOP_DEF(iemOp_jc_Jv)
5713{
5714 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5715 IEMOP_HLP_MIN_386();
5716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5717 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5718 {
5719 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5721
5722 IEM_MC_BEGIN(0, 0);
5723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5724 IEM_MC_REL_JMP_S16(i16Imm);
5725 } IEM_MC_ELSE() {
5726 IEM_MC_ADVANCE_RIP();
5727 } IEM_MC_ENDIF();
5728 IEM_MC_END();
5729 }
5730 else
5731 {
5732 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5734
5735 IEM_MC_BEGIN(0, 0);
5736 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5737 IEM_MC_REL_JMP_S32(i32Imm);
5738 } IEM_MC_ELSE() {
5739 IEM_MC_ADVANCE_RIP();
5740 } IEM_MC_ENDIF();
5741 IEM_MC_END();
5742 }
5743 return VINF_SUCCESS;
5744}
5745
5746
5747/** Opcode 0x0f 0x83. */
5748FNIEMOP_DEF(iemOp_jnc_Jv)
5749{
5750 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5751 IEMOP_HLP_MIN_386();
5752 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5753 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5754 {
5755 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5757
5758 IEM_MC_BEGIN(0, 0);
5759 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5760 IEM_MC_ADVANCE_RIP();
5761 } IEM_MC_ELSE() {
5762 IEM_MC_REL_JMP_S16(i16Imm);
5763 } IEM_MC_ENDIF();
5764 IEM_MC_END();
5765 }
5766 else
5767 {
5768 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770
5771 IEM_MC_BEGIN(0, 0);
5772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5773 IEM_MC_ADVANCE_RIP();
5774 } IEM_MC_ELSE() {
5775 IEM_MC_REL_JMP_S32(i32Imm);
5776 } IEM_MC_ENDIF();
5777 IEM_MC_END();
5778 }
5779 return VINF_SUCCESS;
5780}
5781
5782
5783/** Opcode 0x0f 0x84. */
5784FNIEMOP_DEF(iemOp_je_Jv)
5785{
5786 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5787 IEMOP_HLP_MIN_386();
5788 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5789 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5790 {
5791 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5793
5794 IEM_MC_BEGIN(0, 0);
5795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5796 IEM_MC_REL_JMP_S16(i16Imm);
5797 } IEM_MC_ELSE() {
5798 IEM_MC_ADVANCE_RIP();
5799 } IEM_MC_ENDIF();
5800 IEM_MC_END();
5801 }
5802 else
5803 {
5804 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5806
5807 IEM_MC_BEGIN(0, 0);
5808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5809 IEM_MC_REL_JMP_S32(i32Imm);
5810 } IEM_MC_ELSE() {
5811 IEM_MC_ADVANCE_RIP();
5812 } IEM_MC_ENDIF();
5813 IEM_MC_END();
5814 }
5815 return VINF_SUCCESS;
5816}
5817
5818
5819/** Opcode 0x0f 0x85. */
5820FNIEMOP_DEF(iemOp_jne_Jv)
5821{
5822 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5823 IEMOP_HLP_MIN_386();
5824 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5825 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5826 {
5827 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5829
5830 IEM_MC_BEGIN(0, 0);
5831 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5832 IEM_MC_ADVANCE_RIP();
5833 } IEM_MC_ELSE() {
5834 IEM_MC_REL_JMP_S16(i16Imm);
5835 } IEM_MC_ENDIF();
5836 IEM_MC_END();
5837 }
5838 else
5839 {
5840 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842
5843 IEM_MC_BEGIN(0, 0);
5844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5845 IEM_MC_ADVANCE_RIP();
5846 } IEM_MC_ELSE() {
5847 IEM_MC_REL_JMP_S32(i32Imm);
5848 } IEM_MC_ENDIF();
5849 IEM_MC_END();
5850 }
5851 return VINF_SUCCESS;
5852}
5853
5854
5855/** Opcode 0x0f 0x86. */
5856FNIEMOP_DEF(iemOp_jbe_Jv)
5857{
5858 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5859 IEMOP_HLP_MIN_386();
5860 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5861 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5862 {
5863 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865
5866 IEM_MC_BEGIN(0, 0);
5867 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5868 IEM_MC_REL_JMP_S16(i16Imm);
5869 } IEM_MC_ELSE() {
5870 IEM_MC_ADVANCE_RIP();
5871 } IEM_MC_ENDIF();
5872 IEM_MC_END();
5873 }
5874 else
5875 {
5876 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878
5879 IEM_MC_BEGIN(0, 0);
5880 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5881 IEM_MC_REL_JMP_S32(i32Imm);
5882 } IEM_MC_ELSE() {
5883 IEM_MC_ADVANCE_RIP();
5884 } IEM_MC_ENDIF();
5885 IEM_MC_END();
5886 }
5887 return VINF_SUCCESS;
5888}
5889
5890
5891/** Opcode 0x0f 0x87. */
5892FNIEMOP_DEF(iemOp_jnbe_Jv)
5893{
5894 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5895 IEMOP_HLP_MIN_386();
5896 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5897 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5898 {
5899 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901
5902 IEM_MC_BEGIN(0, 0);
5903 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5904 IEM_MC_ADVANCE_RIP();
5905 } IEM_MC_ELSE() {
5906 IEM_MC_REL_JMP_S16(i16Imm);
5907 } IEM_MC_ENDIF();
5908 IEM_MC_END();
5909 }
5910 else
5911 {
5912 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5914
5915 IEM_MC_BEGIN(0, 0);
5916 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5917 IEM_MC_ADVANCE_RIP();
5918 } IEM_MC_ELSE() {
5919 IEM_MC_REL_JMP_S32(i32Imm);
5920 } IEM_MC_ENDIF();
5921 IEM_MC_END();
5922 }
5923 return VINF_SUCCESS;
5924}
5925
5926
5927/** Opcode 0x0f 0x88. */
5928FNIEMOP_DEF(iemOp_js_Jv)
5929{
5930 IEMOP_MNEMONIC(js_Jv, "js Jv");
5931 IEMOP_HLP_MIN_386();
5932 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5933 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5934 {
5935 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5937
5938 IEM_MC_BEGIN(0, 0);
5939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5940 IEM_MC_REL_JMP_S16(i16Imm);
5941 } IEM_MC_ELSE() {
5942 IEM_MC_ADVANCE_RIP();
5943 } IEM_MC_ENDIF();
5944 IEM_MC_END();
5945 }
5946 else
5947 {
5948 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5950
5951 IEM_MC_BEGIN(0, 0);
5952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5953 IEM_MC_REL_JMP_S32(i32Imm);
5954 } IEM_MC_ELSE() {
5955 IEM_MC_ADVANCE_RIP();
5956 } IEM_MC_ENDIF();
5957 IEM_MC_END();
5958 }
5959 return VINF_SUCCESS;
5960}
5961
5962
5963/** Opcode 0x0f 0x89. */
5964FNIEMOP_DEF(iemOp_jns_Jv)
5965{
5966 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5967 IEMOP_HLP_MIN_386();
5968 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5969 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5970 {
5971 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5973
5974 IEM_MC_BEGIN(0, 0);
5975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5976 IEM_MC_ADVANCE_RIP();
5977 } IEM_MC_ELSE() {
5978 IEM_MC_REL_JMP_S16(i16Imm);
5979 } IEM_MC_ENDIF();
5980 IEM_MC_END();
5981 }
5982 else
5983 {
5984 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5986
5987 IEM_MC_BEGIN(0, 0);
5988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5989 IEM_MC_ADVANCE_RIP();
5990 } IEM_MC_ELSE() {
5991 IEM_MC_REL_JMP_S32(i32Imm);
5992 } IEM_MC_ENDIF();
5993 IEM_MC_END();
5994 }
5995 return VINF_SUCCESS;
5996}
5997
5998
5999/** Opcode 0x0f 0x8a. */
6000FNIEMOP_DEF(iemOp_jp_Jv)
6001{
6002 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
6003 IEMOP_HLP_MIN_386();
6004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6005 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6006 {
6007 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6009
6010 IEM_MC_BEGIN(0, 0);
6011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6012 IEM_MC_REL_JMP_S16(i16Imm);
6013 } IEM_MC_ELSE() {
6014 IEM_MC_ADVANCE_RIP();
6015 } IEM_MC_ENDIF();
6016 IEM_MC_END();
6017 }
6018 else
6019 {
6020 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022
6023 IEM_MC_BEGIN(0, 0);
6024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6025 IEM_MC_REL_JMP_S32(i32Imm);
6026 } IEM_MC_ELSE() {
6027 IEM_MC_ADVANCE_RIP();
6028 } IEM_MC_ENDIF();
6029 IEM_MC_END();
6030 }
6031 return VINF_SUCCESS;
6032}
6033
6034
6035/** Opcode 0x0f 0x8b. */
6036FNIEMOP_DEF(iemOp_jnp_Jv)
6037{
6038 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
6039 IEMOP_HLP_MIN_386();
6040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6041 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6042 {
6043 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045
6046 IEM_MC_BEGIN(0, 0);
6047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6048 IEM_MC_ADVANCE_RIP();
6049 } IEM_MC_ELSE() {
6050 IEM_MC_REL_JMP_S16(i16Imm);
6051 } IEM_MC_ENDIF();
6052 IEM_MC_END();
6053 }
6054 else
6055 {
6056 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6058
6059 IEM_MC_BEGIN(0, 0);
6060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6061 IEM_MC_ADVANCE_RIP();
6062 } IEM_MC_ELSE() {
6063 IEM_MC_REL_JMP_S32(i32Imm);
6064 } IEM_MC_ENDIF();
6065 IEM_MC_END();
6066 }
6067 return VINF_SUCCESS;
6068}
6069
6070
6071/** Opcode 0x0f 0x8c. */
6072FNIEMOP_DEF(iemOp_jl_Jv)
6073{
6074 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
6075 IEMOP_HLP_MIN_386();
6076 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6077 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6078 {
6079 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6081
6082 IEM_MC_BEGIN(0, 0);
6083 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6084 IEM_MC_REL_JMP_S16(i16Imm);
6085 } IEM_MC_ELSE() {
6086 IEM_MC_ADVANCE_RIP();
6087 } IEM_MC_ENDIF();
6088 IEM_MC_END();
6089 }
6090 else
6091 {
6092 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6094
6095 IEM_MC_BEGIN(0, 0);
6096 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6097 IEM_MC_REL_JMP_S32(i32Imm);
6098 } IEM_MC_ELSE() {
6099 IEM_MC_ADVANCE_RIP();
6100 } IEM_MC_ENDIF();
6101 IEM_MC_END();
6102 }
6103 return VINF_SUCCESS;
6104}
6105
6106
6107/** Opcode 0x0f 0x8d. */
6108FNIEMOP_DEF(iemOp_jnl_Jv)
6109{
6110 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
6111 IEMOP_HLP_MIN_386();
6112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6113 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6114 {
6115 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6117
6118 IEM_MC_BEGIN(0, 0);
6119 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6120 IEM_MC_ADVANCE_RIP();
6121 } IEM_MC_ELSE() {
6122 IEM_MC_REL_JMP_S16(i16Imm);
6123 } IEM_MC_ENDIF();
6124 IEM_MC_END();
6125 }
6126 else
6127 {
6128 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6130
6131 IEM_MC_BEGIN(0, 0);
6132 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6133 IEM_MC_ADVANCE_RIP();
6134 } IEM_MC_ELSE() {
6135 IEM_MC_REL_JMP_S32(i32Imm);
6136 } IEM_MC_ENDIF();
6137 IEM_MC_END();
6138 }
6139 return VINF_SUCCESS;
6140}
6141
6142
6143/** Opcode 0x0f 0x8e. */
6144FNIEMOP_DEF(iemOp_jle_Jv)
6145{
6146 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6147 IEMOP_HLP_MIN_386();
6148 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6149 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6150 {
6151 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6153
6154 IEM_MC_BEGIN(0, 0);
6155 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6156 IEM_MC_REL_JMP_S16(i16Imm);
6157 } IEM_MC_ELSE() {
6158 IEM_MC_ADVANCE_RIP();
6159 } IEM_MC_ENDIF();
6160 IEM_MC_END();
6161 }
6162 else
6163 {
6164 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166
6167 IEM_MC_BEGIN(0, 0);
6168 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6169 IEM_MC_REL_JMP_S32(i32Imm);
6170 } IEM_MC_ELSE() {
6171 IEM_MC_ADVANCE_RIP();
6172 } IEM_MC_ENDIF();
6173 IEM_MC_END();
6174 }
6175 return VINF_SUCCESS;
6176}
6177
6178
6179/** Opcode 0x0f 0x8f. */
6180FNIEMOP_DEF(iemOp_jnle_Jv)
6181{
6182 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6183 IEMOP_HLP_MIN_386();
6184 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6185 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6186 {
6187 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6189
6190 IEM_MC_BEGIN(0, 0);
6191 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6192 IEM_MC_ADVANCE_RIP();
6193 } IEM_MC_ELSE() {
6194 IEM_MC_REL_JMP_S16(i16Imm);
6195 } IEM_MC_ENDIF();
6196 IEM_MC_END();
6197 }
6198 else
6199 {
6200 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6202
6203 IEM_MC_BEGIN(0, 0);
6204 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6205 IEM_MC_ADVANCE_RIP();
6206 } IEM_MC_ELSE() {
6207 IEM_MC_REL_JMP_S32(i32Imm);
6208 } IEM_MC_ENDIF();
6209 IEM_MC_END();
6210 }
6211 return VINF_SUCCESS;
6212}
6213
6214
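/*
 * The 0x0f 0x90..0x9f block below implements the SETcc instructions. Each
 * one evaluates its condition from EFLAGS and stores a single byte, 1 or 0,
 * to the ModRM r/m operand; only the r/m field selects the destination and
 * EFLAGS itself is left untouched (see the per-opcode @todo notes on the
 * unused 'reg' field).
 */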
6215/** Opcode 0x0f 0x90. */
6216FNIEMOP_DEF(iemOp_seto_Eb)
6217{
6218 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6219 IEMOP_HLP_MIN_386();
6220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6221
6222 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6223 * any way. AMD says it's "unused", whatever that means. We're
6224 * ignoring for now. */
6225 if (IEM_IS_MODRM_REG_MODE(bRm))
6226 {
6227 /* register target */
6228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6229 IEM_MC_BEGIN(0, 0);
6230 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6231 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6232 } IEM_MC_ELSE() {
6233 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6234 } IEM_MC_ENDIF();
6235 IEM_MC_ADVANCE_RIP();
6236 IEM_MC_END();
6237 }
6238 else
6239 {
6240 /* memory target */
6241 IEM_MC_BEGIN(0, 1);
6242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6245 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6246 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6247 } IEM_MC_ELSE() {
6248 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6249 } IEM_MC_ENDIF();
6250 IEM_MC_ADVANCE_RIP();
6251 IEM_MC_END();
6252 }
6253 return VINF_SUCCESS;
6254}
6255
6256
6257/** Opcode 0x0f 0x91. */
6258FNIEMOP_DEF(iemOp_setno_Eb)
6259{
6260 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6261 IEMOP_HLP_MIN_386();
6262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6263
6264 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6265 * any way. AMD says it's "unused", whatever that means. We're
6266 * ignoring for now. */
6267 if (IEM_IS_MODRM_REG_MODE(bRm))
6268 {
6269 /* register target */
6270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6271 IEM_MC_BEGIN(0, 0);
6272 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6273 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6274 } IEM_MC_ELSE() {
6275 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6276 } IEM_MC_ENDIF();
6277 IEM_MC_ADVANCE_RIP();
6278 IEM_MC_END();
6279 }
6280 else
6281 {
6282 /* memory target */
6283 IEM_MC_BEGIN(0, 1);
6284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6288 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 } IEM_MC_ELSE() {
6290 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6291 } IEM_MC_ENDIF();
6292 IEM_MC_ADVANCE_RIP();
6293 IEM_MC_END();
6294 }
6295 return VINF_SUCCESS;
6296}
6297
6298
6299/** Opcode 0x0f 0x92. */
6300FNIEMOP_DEF(iemOp_setc_Eb)
6301{
6302 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6303 IEMOP_HLP_MIN_386();
6304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6305
6306 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6307 * any way. AMD says it's "unused", whatever that means. We're
6308 * ignoring for now. */
6309 if (IEM_IS_MODRM_REG_MODE(bRm))
6310 {
6311 /* register target */
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6313 IEM_MC_BEGIN(0, 0);
6314 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6315 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6316 } IEM_MC_ELSE() {
6317 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6318 } IEM_MC_ENDIF();
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 }
6322 else
6323 {
6324 /* memory target */
6325 IEM_MC_BEGIN(0, 1);
6326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6330 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6331 } IEM_MC_ELSE() {
6332 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6333 } IEM_MC_ENDIF();
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 }
6337 return VINF_SUCCESS;
6338}
6339
6340
6341/** Opcode 0x0f 0x93. */
6342FNIEMOP_DEF(iemOp_setnc_Eb)
6343{
6344 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6345 IEMOP_HLP_MIN_386();
6346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6347
6348 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6349 * any way. AMD says it's "unused", whatever that means. We're
6350 * ignoring for now. */
6351 if (IEM_IS_MODRM_REG_MODE(bRm))
6352 {
6353 /* register target */
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6355 IEM_MC_BEGIN(0, 0);
6356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6357 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6358 } IEM_MC_ELSE() {
6359 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6360 } IEM_MC_ENDIF();
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 }
6364 else
6365 {
6366 /* memory target */
6367 IEM_MC_BEGIN(0, 1);
6368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6371 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6372 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6373 } IEM_MC_ELSE() {
6374 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6375 } IEM_MC_ENDIF();
6376 IEM_MC_ADVANCE_RIP();
6377 IEM_MC_END();
6378 }
6379 return VINF_SUCCESS;
6380}
6381
6382
6383/** Opcode 0x0f 0x94. */
6384FNIEMOP_DEF(iemOp_sete_Eb)
6385{
6386 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6387 IEMOP_HLP_MIN_386();
6388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6389
6390 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6391 * any way. AMD says it's "unused", whatever that means. We're
6392 * ignoring for now. */
6393 if (IEM_IS_MODRM_REG_MODE(bRm))
6394 {
6395 /* register target */
6396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6397 IEM_MC_BEGIN(0, 0);
6398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6399 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6400 } IEM_MC_ELSE() {
6401 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6402 } IEM_MC_ENDIF();
6403 IEM_MC_ADVANCE_RIP();
6404 IEM_MC_END();
6405 }
6406 else
6407 {
6408 /* memory target */
6409 IEM_MC_BEGIN(0, 1);
6410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6414 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6415 } IEM_MC_ELSE() {
6416 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6417 } IEM_MC_ENDIF();
6418 IEM_MC_ADVANCE_RIP();
6419 IEM_MC_END();
6420 }
6421 return VINF_SUCCESS;
6422}
6423
6424
6425/** Opcode 0x0f 0x95. */
6426FNIEMOP_DEF(iemOp_setne_Eb)
6427{
6428 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6429 IEMOP_HLP_MIN_386();
6430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6431
6432 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6433 * any way. AMD says it's "unused", whatever that means. We're
6434 * ignoring for now. */
6435 if (IEM_IS_MODRM_REG_MODE(bRm))
6436 {
6437 /* register target */
6438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6439 IEM_MC_BEGIN(0, 0);
6440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6441 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6442 } IEM_MC_ELSE() {
6443 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6444 } IEM_MC_ENDIF();
6445 IEM_MC_ADVANCE_RIP();
6446 IEM_MC_END();
6447 }
6448 else
6449 {
6450 /* memory target */
6451 IEM_MC_BEGIN(0, 1);
6452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6455 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6456 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6457 } IEM_MC_ELSE() {
6458 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6459 } IEM_MC_ENDIF();
6460 IEM_MC_ADVANCE_RIP();
6461 IEM_MC_END();
6462 }
6463 return VINF_SUCCESS;
6464}
6465
6466
6467/** Opcode 0x0f 0x96. */
6468FNIEMOP_DEF(iemOp_setbe_Eb)
6469{
6470 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6471 IEMOP_HLP_MIN_386();
6472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6473
6474 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6475 * any way. AMD says it's "unused", whatever that means. We're
6476 * ignoring for now. */
6477 if (IEM_IS_MODRM_REG_MODE(bRm))
6478 {
6479 /* register target */
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 IEM_MC_BEGIN(0, 0);
6482 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6483 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6484 } IEM_MC_ELSE() {
6485 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6486 } IEM_MC_ENDIF();
6487 IEM_MC_ADVANCE_RIP();
6488 IEM_MC_END();
6489 }
6490 else
6491 {
6492 /* memory target */
6493 IEM_MC_BEGIN(0, 1);
6494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6498 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6499 } IEM_MC_ELSE() {
6500 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6501 } IEM_MC_ENDIF();
6502 IEM_MC_ADVANCE_RIP();
6503 IEM_MC_END();
6504 }
6505 return VINF_SUCCESS;
6506}
6507
6508
6509/** Opcode 0x0f 0x97. */
6510FNIEMOP_DEF(iemOp_setnbe_Eb)
6511{
6512 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6513 IEMOP_HLP_MIN_386();
6514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6515
6516 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6517 * any way. AMD says it's "unused", whatever that means. We're
6518 * ignoring for now. */
6519 if (IEM_IS_MODRM_REG_MODE(bRm))
6520 {
6521 /* register target */
6522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6523 IEM_MC_BEGIN(0, 0);
6524 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6525 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6526 } IEM_MC_ELSE() {
6527 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6528 } IEM_MC_ENDIF();
6529 IEM_MC_ADVANCE_RIP();
6530 IEM_MC_END();
6531 }
6532 else
6533 {
6534 /* memory target */
6535 IEM_MC_BEGIN(0, 1);
6536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6539 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6540 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6541 } IEM_MC_ELSE() {
6542 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6543 } IEM_MC_ENDIF();
6544 IEM_MC_ADVANCE_RIP();
6545 IEM_MC_END();
6546 }
6547 return VINF_SUCCESS;
6548}
6549
6550
6551/** Opcode 0x0f 0x98. */
6552FNIEMOP_DEF(iemOp_sets_Eb)
6553{
6554 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6555 IEMOP_HLP_MIN_386();
6556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6557
6558 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6559 * any way. AMD says it's "unused", whatever that means. We're
6560 * ignoring for now. */
6561 if (IEM_IS_MODRM_REG_MODE(bRm))
6562 {
6563 /* register target */
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6565 IEM_MC_BEGIN(0, 0);
6566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6567 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6568 } IEM_MC_ELSE() {
6569 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6570 } IEM_MC_ENDIF();
6571 IEM_MC_ADVANCE_RIP();
6572 IEM_MC_END();
6573 }
6574 else
6575 {
6576 /* memory target */
6577 IEM_MC_BEGIN(0, 1);
6578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6581 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6582 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6583 } IEM_MC_ELSE() {
6584 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6585 } IEM_MC_ENDIF();
6586 IEM_MC_ADVANCE_RIP();
6587 IEM_MC_END();
6588 }
6589 return VINF_SUCCESS;
6590}
6591
6592
6593/** Opcode 0x0f 0x99. */
6594FNIEMOP_DEF(iemOp_setns_Eb)
6595{
6596 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6597 IEMOP_HLP_MIN_386();
6598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6599
6600 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6601 * any way. AMD says it's "unused", whatever that means. We're
6602 * ignoring for now. */
6603 if (IEM_IS_MODRM_REG_MODE(bRm))
6604 {
6605 /* register target */
6606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6607 IEM_MC_BEGIN(0, 0);
6608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6609 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6610 } IEM_MC_ELSE() {
6611 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6612 } IEM_MC_ENDIF();
6613 IEM_MC_ADVANCE_RIP();
6614 IEM_MC_END();
6615 }
6616 else
6617 {
6618 /* memory target */
6619 IEM_MC_BEGIN(0, 1);
6620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6624 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6625 } IEM_MC_ELSE() {
6626 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6627 } IEM_MC_ENDIF();
6628 IEM_MC_ADVANCE_RIP();
6629 IEM_MC_END();
6630 }
6631 return VINF_SUCCESS;
6632}
6633
6634
6635/** Opcode 0x0f 0x9a. */
6636FNIEMOP_DEF(iemOp_setp_Eb)
6637{
6638 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6639 IEMOP_HLP_MIN_386();
6640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6641
6642 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6643 * any way. AMD says it's "unused", whatever that means. We're
6644 * ignoring for now. */
6645 if (IEM_IS_MODRM_REG_MODE(bRm))
6646 {
6647 /* register target */
6648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6649 IEM_MC_BEGIN(0, 0);
6650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6651 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6652 } IEM_MC_ELSE() {
6653 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6654 } IEM_MC_ENDIF();
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 }
6658 else
6659 {
6660 /* memory target */
6661 IEM_MC_BEGIN(0, 1);
6662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6665 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6666 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6667 } IEM_MC_ELSE() {
6668 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6669 } IEM_MC_ENDIF();
6670 IEM_MC_ADVANCE_RIP();
6671 IEM_MC_END();
6672 }
6673 return VINF_SUCCESS;
6674}
6675
6676
6677/** Opcode 0x0f 0x9b. */
6678FNIEMOP_DEF(iemOp_setnp_Eb)
6679{
6680 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6681 IEMOP_HLP_MIN_386();
6682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6683
6684 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6685 * any way. AMD says it's "unused", whatever that means. We're
6686 * ignoring for now. */
6687 if (IEM_IS_MODRM_REG_MODE(bRm))
6688 {
6689 /* register target */
6690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6691 IEM_MC_BEGIN(0, 0);
6692 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6693 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6694 } IEM_MC_ELSE() {
6695 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6696 } IEM_MC_ENDIF();
6697 IEM_MC_ADVANCE_RIP();
6698 IEM_MC_END();
6699 }
6700 else
6701 {
6702 /* memory target */
6703 IEM_MC_BEGIN(0, 1);
6704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6707 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6708 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6709 } IEM_MC_ELSE() {
6710 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6711 } IEM_MC_ENDIF();
6712 IEM_MC_ADVANCE_RIP();
6713 IEM_MC_END();
6714 }
6715 return VINF_SUCCESS;
6716}
6717
6718
6719/** Opcode 0x0f 0x9c. */
6720FNIEMOP_DEF(iemOp_setl_Eb)
6721{
6722 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6723 IEMOP_HLP_MIN_386();
6724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6725
6726 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6727 * any way. AMD says it's "unused", whatever that means. We're
6728 * ignoring for now. */
6729 if (IEM_IS_MODRM_REG_MODE(bRm))
6730 {
6731 /* register target */
6732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6733 IEM_MC_BEGIN(0, 0);
6734 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6735 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6736 } IEM_MC_ELSE() {
6737 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6738 } IEM_MC_ENDIF();
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 }
6742 else
6743 {
6744 /* memory target */
6745 IEM_MC_BEGIN(0, 1);
6746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6749 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6750 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6751 } IEM_MC_ELSE() {
6752 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6753 } IEM_MC_ENDIF();
6754 IEM_MC_ADVANCE_RIP();
6755 IEM_MC_END();
6756 }
6757 return VINF_SUCCESS;
6758}
6759
6760
6761/** Opcode 0x0f 0x9d. */
6762FNIEMOP_DEF(iemOp_setnl_Eb)
6763{
6764 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6765 IEMOP_HLP_MIN_386();
6766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6767
6768 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6769 * any way. AMD says it's "unused", whatever that means. We're
6770 * ignoring for now. */
6771 if (IEM_IS_MODRM_REG_MODE(bRm))
6772 {
6773 /* register target */
6774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6775 IEM_MC_BEGIN(0, 0);
6776 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6777 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6778 } IEM_MC_ELSE() {
6779 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6780 } IEM_MC_ENDIF();
6781 IEM_MC_ADVANCE_RIP();
6782 IEM_MC_END();
6783 }
6784 else
6785 {
6786 /* memory target */
6787 IEM_MC_BEGIN(0, 1);
6788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6791 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6792 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6793 } IEM_MC_ELSE() {
6794 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6795 } IEM_MC_ENDIF();
6796 IEM_MC_ADVANCE_RIP();
6797 IEM_MC_END();
6798 }
6799 return VINF_SUCCESS;
6800}
6801
6802
6803/** Opcode 0x0f 0x9e. */
6804FNIEMOP_DEF(iemOp_setle_Eb)
6805{
6806 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6807 IEMOP_HLP_MIN_386();
6808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6809
6810 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6811 * any way. AMD says it's "unused", whatever that means. We're
6812 * ignoring for now. */
6813 if (IEM_IS_MODRM_REG_MODE(bRm))
6814 {
6815 /* register target */
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 IEM_MC_BEGIN(0, 0);
6818 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6819 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6820 } IEM_MC_ELSE() {
6821 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6822 } IEM_MC_ENDIF();
6823 IEM_MC_ADVANCE_RIP();
6824 IEM_MC_END();
6825 }
6826 else
6827 {
6828 /* memory target */
6829 IEM_MC_BEGIN(0, 1);
6830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6833 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6834 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6835 } IEM_MC_ELSE() {
6836 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6837 } IEM_MC_ENDIF();
6838 IEM_MC_ADVANCE_RIP();
6839 IEM_MC_END();
6840 }
6841 return VINF_SUCCESS;
6842}
6843
6844
6845/** Opcode 0x0f 0x9f. */
6846FNIEMOP_DEF(iemOp_setnle_Eb)
6847{
6848 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6849 IEMOP_HLP_MIN_386();
6850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6851
6852 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6853 * any way. AMD says it's "unused", whatever that means. We're
6854 * ignoring for now. */
6855 if (IEM_IS_MODRM_REG_MODE(bRm))
6856 {
6857 /* register target */
6858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6859 IEM_MC_BEGIN(0, 0);
6860 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6861 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6862 } IEM_MC_ELSE() {
6863 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6864 } IEM_MC_ENDIF();
6865 IEM_MC_ADVANCE_RIP();
6866 IEM_MC_END();
6867 }
6868 else
6869 {
6870 /* memory target */
6871 IEM_MC_BEGIN(0, 1);
6872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6875 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6876 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6877 } IEM_MC_ELSE() {
6878 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6879 } IEM_MC_ENDIF();
6880 IEM_MC_ADVANCE_RIP();
6881 IEM_MC_END();
6882 }
6883 return VINF_SUCCESS;
6884}
6885
6886
6887/**
6888 * Common 'push segment-register' helper.
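 *
 * For the 32-bit case IEM_MC_PUSH_U32_SREG is used rather than the plain
 * IEM_MC_PUSH_U32; this models CPUs that may write only the low 16 bits of
 * the 32-bit stack slot when pushing a segment register.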
6889 */
6890FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6891{
6892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6893 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
6894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6895
6896 switch (pVCpu->iem.s.enmEffOpSize)
6897 {
6898 case IEMMODE_16BIT:
6899 IEM_MC_BEGIN(0, 1);
6900 IEM_MC_LOCAL(uint16_t, u16Value);
6901 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6902 IEM_MC_PUSH_U16(u16Value);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 break;
6906
6907 case IEMMODE_32BIT:
6908 IEM_MC_BEGIN(0, 1);
6909 IEM_MC_LOCAL(uint32_t, u32Value);
6910 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6911 IEM_MC_PUSH_U32_SREG(u32Value);
6912 IEM_MC_ADVANCE_RIP();
6913 IEM_MC_END();
6914 break;
6915
6916 case IEMMODE_64BIT:
6917 IEM_MC_BEGIN(0, 1);
6918 IEM_MC_LOCAL(uint64_t, u64Value);
6919 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6920 IEM_MC_PUSH_U64(u64Value);
6921 IEM_MC_ADVANCE_RIP();
6922 IEM_MC_END();
6923 break;
6924 }
6925
6926 return VINF_SUCCESS;
6927}
6928
6929
6930/** Opcode 0x0f 0xa0. */
6931FNIEMOP_DEF(iemOp_push_fs)
6932{
6933 IEMOP_MNEMONIC(push_fs, "push fs");
6934 IEMOP_HLP_MIN_386();
6935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6936 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6937}
6938
6939
6940/** Opcode 0x0f 0xa1. */
6941FNIEMOP_DEF(iemOp_pop_fs)
6942{
6943 IEMOP_MNEMONIC(pop_fs, "pop fs");
6944 IEMOP_HLP_MIN_386();
6945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6946 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6947}
6948
6949
6950/** Opcode 0x0f 0xa2. */
6951FNIEMOP_DEF(iemOp_cpuid)
6952{
6953 IEMOP_MNEMONIC(cpuid, "cpuid");
6954 IEMOP_HLP_MIN_486(); /* not all 486es. */
6955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6956 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6957}
6958
6959
6960/**
6961 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6962 * iemOp_bts_Ev_Gv.
6963 */
6964FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6965{
6966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6967 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6968
6969 if (IEM_IS_MODRM_REG_MODE(bRm))
6970 {
6971 /* register destination. */
6972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6973 switch (pVCpu->iem.s.enmEffOpSize)
6974 {
6975 case IEMMODE_16BIT:
6976 IEM_MC_BEGIN(3, 0);
6977 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6978 IEM_MC_ARG(uint16_t, u16Src, 1);
6979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6980
6981 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6982 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6983 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6984 IEM_MC_REF_EFLAGS(pEFlags);
6985 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6986
6987 IEM_MC_ADVANCE_RIP();
6988 IEM_MC_END();
6989 return VINF_SUCCESS;
6990
6991 case IEMMODE_32BIT:
6992 IEM_MC_BEGIN(3, 0);
6993 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6994 IEM_MC_ARG(uint32_t, u32Src, 1);
6995 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6996
6997 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6998 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6999 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7000 IEM_MC_REF_EFLAGS(pEFlags);
7001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7002
7003 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7004 IEM_MC_ADVANCE_RIP();
7005 IEM_MC_END();
7006 return VINF_SUCCESS;
7007
7008 case IEMMODE_64BIT:
7009 IEM_MC_BEGIN(3, 0);
7010 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7011 IEM_MC_ARG(uint64_t, u64Src, 1);
7012 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7013
7014 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7015 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
7016 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7017 IEM_MC_REF_EFLAGS(pEFlags);
7018 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7019
7020 IEM_MC_ADVANCE_RIP();
7021 IEM_MC_END();
7022 return VINF_SUCCESS;
7023
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 }
7027 else
7028 {
7029 /* memory destination. */
7030
7031 uint32_t fAccess;
7032 if (pImpl->pfnLockedU16)
7033 fAccess = IEM_ACCESS_DATA_RW;
7034 else /* BT */
7035 fAccess = IEM_ACCESS_DATA_R;
7036
7037 /** @todo test negative bit offsets! */
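 /*
  * For the memory form the bit offset in Gv is a signed quantity that can
  * select memory beyond the addressed operand: the EA is adjusted by
  * (offset >> log2(operand width)) operand-sized units and the remaining
  * low bits pick the bit within it. E.g. 'bt word [mem], 35' adds
  * (35 >> 4) * 2 = 4 bytes to the EA and tests bit 35 & 15 = 3; using an
  * arithmetic shift keeps this correct for negative offsets as well.
  */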
7038 switch (pVCpu->iem.s.enmEffOpSize)
7039 {
7040 case IEMMODE_16BIT:
7041 IEM_MC_BEGIN(3, 2);
7042 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7043 IEM_MC_ARG(uint16_t, u16Src, 1);
7044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7046 IEM_MC_LOCAL(int16_t, i16AddrAdj);
7047
7048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7049 if (pImpl->pfnLockedU16)
7050 IEMOP_HLP_DONE_DECODING();
7051 else
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7054 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
7055 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
7056 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
7057 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
7058 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
7059 IEM_MC_FETCH_EFLAGS(EFlags);
7060
7061 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7062 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7064 else
7065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7066 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7067
7068 IEM_MC_COMMIT_EFLAGS(EFlags);
7069 IEM_MC_ADVANCE_RIP();
7070 IEM_MC_END();
7071 return VINF_SUCCESS;
7072
7073 case IEMMODE_32BIT:
7074 IEM_MC_BEGIN(3, 2);
7075 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7076 IEM_MC_ARG(uint32_t, u32Src, 1);
7077 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7079 IEM_MC_LOCAL(int32_t, i32AddrAdj);
7080
7081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7082 if (pImpl->pfnLockedU16)
7083 IEMOP_HLP_DONE_DECODING();
7084 else
7085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7086 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7087 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
7088 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
7089 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
7090 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
7091 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
7092 IEM_MC_FETCH_EFLAGS(EFlags);
7093
7094 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7095 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7096 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7097 else
7098 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7099 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7100
7101 IEM_MC_COMMIT_EFLAGS(EFlags);
7102 IEM_MC_ADVANCE_RIP();
7103 IEM_MC_END();
7104 return VINF_SUCCESS;
7105
7106 case IEMMODE_64BIT:
7107 IEM_MC_BEGIN(3, 2);
7108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7109 IEM_MC_ARG(uint64_t, u64Src, 1);
7110 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7112 IEM_MC_LOCAL(int64_t, i64AddrAdj);
7113
7114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7115 if (pImpl->pfnLockedU16)
7116 IEMOP_HLP_DONE_DECODING();
7117 else
7118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7119 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7120 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
7121 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
7122 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
7123 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
7124 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
7125 IEM_MC_FETCH_EFLAGS(EFlags);
7126
7127 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7128 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7130 else
7131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7132 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7133
7134 IEM_MC_COMMIT_EFLAGS(EFlags);
7135 IEM_MC_ADVANCE_RIP();
7136 IEM_MC_END();
7137 return VINF_SUCCESS;
7138
7139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7140 }
7141 }
7142}
7143
7144
7145/** Opcode 0x0f 0xa3. */
7146FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7147{
7148 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7149 IEMOP_HLP_MIN_386();
7150 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7151}
7152
7153
7154/**
7155 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
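 *
 * Note that in the memory form the imm8 shift count still follows the ModRM
 * bytes, so IEM_MC_CALC_RM_EFF_ADDR is passed a pending-immediate size of 1;
 * this matters for RIP-relative addressing, where the displacement is
 * relative to the end of the complete instruction.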
7156 */
7157FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7158{
7159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7160 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7161
7162 if (IEM_IS_MODRM_REG_MODE(bRm))
7163 {
7164 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7166
7167 switch (pVCpu->iem.s.enmEffOpSize)
7168 {
7169 case IEMMODE_16BIT:
7170 IEM_MC_BEGIN(4, 0);
7171 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7172 IEM_MC_ARG(uint16_t, u16Src, 1);
7173 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7175
7176 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7177 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 return VINF_SUCCESS;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(4, 0);
7187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7188 IEM_MC_ARG(uint32_t, u32Src, 1);
7189 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7190 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7191
7192 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7193 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7194 IEM_MC_REF_EFLAGS(pEFlags);
7195 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7196
7197 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7198 IEM_MC_ADVANCE_RIP();
7199 IEM_MC_END();
7200 return VINF_SUCCESS;
7201
7202 case IEMMODE_64BIT:
7203 IEM_MC_BEGIN(4, 0);
7204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7205 IEM_MC_ARG(uint64_t, u64Src, 1);
7206 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7207 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7208
7209 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7210 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7211 IEM_MC_REF_EFLAGS(pEFlags);
7212 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7213
7214 IEM_MC_ADVANCE_RIP();
7215 IEM_MC_END();
7216 return VINF_SUCCESS;
7217
7218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7219 }
7220 }
7221 else
7222 {
7223 switch (pVCpu->iem.s.enmEffOpSize)
7224 {
7225 case IEMMODE_16BIT:
7226 IEM_MC_BEGIN(4, 2);
7227 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7228 IEM_MC_ARG(uint16_t, u16Src, 1);
7229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7232
7233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7234 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7235 IEM_MC_ASSIGN(cShiftArg, cShift);
7236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7237 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7238 IEM_MC_FETCH_EFLAGS(EFlags);
7239 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7240 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7241
7242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7243 IEM_MC_COMMIT_EFLAGS(EFlags);
7244 IEM_MC_ADVANCE_RIP();
7245 IEM_MC_END();
7246 return VINF_SUCCESS;
7247
7248 case IEMMODE_32BIT:
7249 IEM_MC_BEGIN(4, 2);
7250 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7251 IEM_MC_ARG(uint32_t, u32Src, 1);
7252 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7253 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7255
7256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7257 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7258 IEM_MC_ASSIGN(cShiftArg, cShift);
7259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7260 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7261 IEM_MC_FETCH_EFLAGS(EFlags);
7262 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7264
7265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7266 IEM_MC_COMMIT_EFLAGS(EFlags);
7267 IEM_MC_ADVANCE_RIP();
7268 IEM_MC_END();
7269 return VINF_SUCCESS;
7270
7271 case IEMMODE_64BIT:
7272 IEM_MC_BEGIN(4, 2);
7273 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7274 IEM_MC_ARG(uint64_t, u64Src, 1);
7275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7278
7279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7280 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7281 IEM_MC_ASSIGN(cShiftArg, cShift);
7282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7283 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7284 IEM_MC_FETCH_EFLAGS(EFlags);
7285 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7286 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7287
7288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7289 IEM_MC_COMMIT_EFLAGS(EFlags);
7290 IEM_MC_ADVANCE_RIP();
7291 IEM_MC_END();
7292 return VINF_SUCCESS;
7293
7294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7295 }
7296 }
7297}
7298
7299
7300/**
7301 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
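 *
 * The shift count comes from CL; the architectural masking of the count
 * (modulo 32, or modulo 64 for 64-bit operands) is left to the pfnNormal
 * workers.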
7302 */
7303FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7304{
7305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7307
7308 if (IEM_IS_MODRM_REG_MODE(bRm))
7309 {
7310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7311
7312 switch (pVCpu->iem.s.enmEffOpSize)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_BEGIN(4, 0);
7316 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7317 IEM_MC_ARG(uint16_t, u16Src, 1);
7318 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7319 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7320
7321 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7322 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7323 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7324 IEM_MC_REF_EFLAGS(pEFlags);
7325 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7326
7327 IEM_MC_ADVANCE_RIP();
7328 IEM_MC_END();
7329 return VINF_SUCCESS;
7330
7331 case IEMMODE_32BIT:
7332 IEM_MC_BEGIN(4, 0);
7333 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7334 IEM_MC_ARG(uint32_t, u32Src, 1);
7335 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7336 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7337
7338 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7339 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7340 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7341 IEM_MC_REF_EFLAGS(pEFlags);
7342 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7343
7344 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7345 IEM_MC_ADVANCE_RIP();
7346 IEM_MC_END();
7347 return VINF_SUCCESS;
7348
7349 case IEMMODE_64BIT:
7350 IEM_MC_BEGIN(4, 0);
7351 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7352 IEM_MC_ARG(uint64_t, u64Src, 1);
7353 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7354 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7355
7356 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7357 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7358 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7359 IEM_MC_REF_EFLAGS(pEFlags);
7360 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7361
7362 IEM_MC_ADVANCE_RIP();
7363 IEM_MC_END();
7364 return VINF_SUCCESS;
7365
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 }
7369 else
7370 {
7371 switch (pVCpu->iem.s.enmEffOpSize)
7372 {
7373 case IEMMODE_16BIT:
7374 IEM_MC_BEGIN(4, 2);
7375 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7376 IEM_MC_ARG(uint16_t, u16Src, 1);
7377 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7378 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7380
7381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7383 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7384 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7385 IEM_MC_FETCH_EFLAGS(EFlags);
7386 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7387 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7388
7389 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7390 IEM_MC_COMMIT_EFLAGS(EFlags);
7391 IEM_MC_ADVANCE_RIP();
7392 IEM_MC_END();
7393 return VINF_SUCCESS;
7394
7395 case IEMMODE_32BIT:
7396 IEM_MC_BEGIN(4, 2);
7397 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7398 IEM_MC_ARG(uint32_t, u32Src, 1);
7399 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7400 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7402
7403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7405 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7406 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7407 IEM_MC_FETCH_EFLAGS(EFlags);
7408 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7409 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7410
7411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7412 IEM_MC_COMMIT_EFLAGS(EFlags);
7413 IEM_MC_ADVANCE_RIP();
7414 IEM_MC_END();
7415 return VINF_SUCCESS;
7416
7417 case IEMMODE_64BIT:
7418 IEM_MC_BEGIN(4, 2);
7419 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7420 IEM_MC_ARG(uint64_t, u64Src, 1);
7421 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7422 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7424
7425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7427 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7428 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7429 IEM_MC_FETCH_EFLAGS(EFlags);
7430 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7431 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7432
7433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7434 IEM_MC_COMMIT_EFLAGS(EFlags);
7435 IEM_MC_ADVANCE_RIP();
7436 IEM_MC_END();
7437 return VINF_SUCCESS;
7438
7439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7440 }
7441 }
7442}
7443
7444
7445
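/*
 * For shld/shrd the AF and OF behavior is implementation specific (both are
 * declared undefined above), so IEMTARGETCPU_EFL_BEHAVIOR_SELECT below picks
 * the EFLAGS variant of the worker that matches the CPU being emulated.
 */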
7446/** Opcode 0x0f 0xa4. */
7447FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7448{
7449 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7450 IEMOP_HLP_MIN_386();
7451 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7452}
7453
7454
7455/** Opcode 0x0f 0xa5. */
7456FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7457{
7458 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7459 IEMOP_HLP_MIN_386();
7460 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7461}
7462
7463
7464/** Opcode 0x0f 0xa8. */
7465FNIEMOP_DEF(iemOp_push_gs)
7466{
7467 IEMOP_MNEMONIC(push_gs, "push gs");
7468 IEMOP_HLP_MIN_386();
7469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7470 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7471}
7472
7473
7474/** Opcode 0x0f 0xa9. */
7475FNIEMOP_DEF(iemOp_pop_gs)
7476{
7477 IEMOP_MNEMONIC(pop_gs, "pop gs");
7478 IEMOP_HLP_MIN_386();
7479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7480 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7481}
7482
7483
7484/** Opcode 0x0f 0xaa. */
7485FNIEMOP_DEF(iemOp_rsm)
7486{
7487 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7488 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7490 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7491}
7492
7493
7494
7495/** Opcode 0x0f 0xab. */
7496FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7497{
7498 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7499 IEMOP_HLP_MIN_386();
7500 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7501}
7502
7503
7504/** Opcode 0x0f 0xac. */
7505FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7506{
7507 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7508 IEMOP_HLP_MIN_386();
7509 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7510}
7511
7512
7513/** Opcode 0x0f 0xad. */
7514FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7515{
7516 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7517 IEMOP_HLP_MIN_386();
7518 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7519}
7520
7521
7522/** Opcode 0x0f 0xae mem/0. */
7523FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7524{
7525 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7526 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7527 return IEMOP_RAISE_INVALID_OPCODE();
7528
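 /* The work is done by iemCImpl_fxsave; the effective operand size is
 * passed along since REX.W selects the 64-bit (FXSAVE64) image layout.
 * Likewise for fxrstor below. */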
7529 IEM_MC_BEGIN(3, 1);
7530 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7531 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7532 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7535 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7536 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7537 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7538 IEM_MC_END();
7539 return VINF_SUCCESS;
7540}
7541
7542
7543/** Opcode 0x0f 0xae mem/1. */
7544FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7545{
7546 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7547 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7548 return IEMOP_RAISE_INVALID_OPCODE();
7549
7550 IEM_MC_BEGIN(3, 1);
7551 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7552 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7553 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7556 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7557 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7558 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7559 IEM_MC_END();
7560 return VINF_SUCCESS;
7561}
7562
7563
7564/**
7565 * @opmaps grp15
7566 * @opcode !11/2
7567 * @oppfx none
7568 * @opcpuid sse
7569 * @opgroup og_sse_mxcsrsm
7570 * @opxcpttype 5
7571 * @optest op1=0 -> mxcsr=0
7572 * @optest op1=0x2083 -> mxcsr=0x2083
7573 * @optest op1=0xfffffffe -> value.xcpt=0xd
7574 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7575 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7576 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7577 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7578 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7579 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7580 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7581 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7582 */
7583FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7584{
7585 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7586 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7587 return IEMOP_RAISE_INVALID_OPCODE();
7588
7589 IEM_MC_BEGIN(2, 0);
7590 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7591 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7594 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7595 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7596 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7597 IEM_MC_END();
7598 return VINF_SUCCESS;
7599}
7600
7601
7602/**
7603 * @opmaps grp15
7604 * @opcode !11/3
7605 * @oppfx none
7606 * @opcpuid sse
7607 * @opgroup og_sse_mxcsrsm
7608 * @opxcpttype 5
7609 * @optest mxcsr=0 -> op1=0
7610 * @optest mxcsr=0x2083 -> op1=0x2083
7611 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7612 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7613 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7614 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7615 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7616 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7617 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7618 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7619 */
7620FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7621{
7622 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7623 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7624 return IEMOP_RAISE_INVALID_OPCODE();
7625
7626 IEM_MC_BEGIN(2, 0);
7627 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7628 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7631 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7632 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7633 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7634 IEM_MC_END();
7635 return VINF_SUCCESS;
7636}
7637
7638
7639/**
7640 * @opmaps grp15
7641 * @opcode !11/4
7642 * @oppfx none
7643 * @opcpuid xsave
7644 * @opgroup og_system
7645 * @opxcpttype none
7646 */
7647FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7648{
7649 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7650 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7651 return IEMOP_RAISE_INVALID_OPCODE();
7652
7653 IEM_MC_BEGIN(3, 0);
7654 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7655 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7656 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7659 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7660 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7661 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7662 IEM_MC_END();
7663 return VINF_SUCCESS;
7664}
7665
7666
7667/**
7668 * @opmaps grp15
7669 * @opcode !11/5
7670 * @oppfx none
7671 * @opcpuid xsave
7672 * @opgroup og_system
7673 * @opxcpttype none
7674 */
7675FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7676{
7677 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7678 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7679 return IEMOP_RAISE_INVALID_OPCODE();
7680
7681 IEM_MC_BEGIN(3, 0);
7682 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7683 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7684 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7687 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7688 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7689 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7690 IEM_MC_END();
7691 return VINF_SUCCESS;
7692}
7693
7694/** Opcode 0x0f 0xae mem/6. */
7695FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7696
7697/**
7698 * @opmaps grp15
7699 * @opcode !11/7
7700 * @oppfx none
7701 * @opcpuid clfsh
7702 * @opgroup og_cachectl
7703 * @optest op1=1 ->
7704 */
7705FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7706{
7707 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7708 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7709 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7710
7711 IEM_MC_BEGIN(2, 0);
7712 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7713 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7717 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7718 IEM_MC_END();
7719 return VINF_SUCCESS;
7720}
7721
7722/**
7723 * @opmaps grp15
7724 * @opcode !11/7
7725 * @oppfx 0x66
7726 * @opcpuid clflushopt
7727 * @opgroup og_cachectl
7728 * @optest op1=1 ->
7729 */
7730FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7731{
7732 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7733 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7734 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7735
7736 IEM_MC_BEGIN(2, 0);
7737 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7738 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7741 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7742 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7743 IEM_MC_END();
7744 return VINF_SUCCESS;
7745}
7746
7747
7748/** Opcode 0x0f 0xae 11b/5. */
7749FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7750{
7751 RT_NOREF_PV(bRm);
7752 IEMOP_MNEMONIC(lfence, "lfence");
7753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7754 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7755 return IEMOP_RAISE_INVALID_OPCODE();
7756
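 /* On x86/AMD64 hosts the real LFENCE instruction can only be used when
 * the host itself has SSE2; otherwise an alternative fence (presumably a
 * locked RMW operation) is used. The same goes for MFENCE and SFENCE
 * below. On ARM64 the primary worker is always used. */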
7757 IEM_MC_BEGIN(0, 0);
7758#ifndef RT_ARCH_ARM64
7759 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7760#endif
7761 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7762#ifndef RT_ARCH_ARM64
7763 else
7764 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7765#endif
7766 IEM_MC_ADVANCE_RIP();
7767 IEM_MC_END();
7768 return VINF_SUCCESS;
7769}
7770
7771
7772/** Opcode 0x0f 0xae 11b/6. */
7773FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7774{
7775 RT_NOREF_PV(bRm);
7776 IEMOP_MNEMONIC(mfence, "mfence");
7777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7778 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7779 return IEMOP_RAISE_INVALID_OPCODE();
7780
7781 IEM_MC_BEGIN(0, 0);
7782#ifndef RT_ARCH_ARM64
7783 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7784#endif
7785 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7786#ifndef RT_ARCH_ARM64
7787 else
7788 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7789#endif
7790 IEM_MC_ADVANCE_RIP();
7791 IEM_MC_END();
7792 return VINF_SUCCESS;
7793}
7794
7795
7796/** Opcode 0x0f 0xae 11b/7. */
7797FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7798{
7799 RT_NOREF_PV(bRm);
7800 IEMOP_MNEMONIC(sfence, "sfence");
7801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7802 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7803 return IEMOP_RAISE_INVALID_OPCODE();
7804
7805 IEM_MC_BEGIN(0, 0);
7806#ifndef RT_ARCH_ARM64
7807 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7808#endif
7809 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7810#ifndef RT_ARCH_ARM64
7811 else
7812 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7813#endif
7814 IEM_MC_ADVANCE_RIP();
7815 IEM_MC_END();
7816 return VINF_SUCCESS;
7817}
7818
7819
7820/** Opcode 0xf3 0x0f 0xae 11b/0. */
7821FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7822{
7823 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7825 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7826 {
7827 IEM_MC_BEGIN(1, 0);
7828 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7829 IEM_MC_ARG(uint64_t, u64Dst, 0);
7830 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7831 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7832 IEM_MC_ADVANCE_RIP();
7833 IEM_MC_END();
7834 }
7835 else
7836 {
7837 IEM_MC_BEGIN(1, 0);
7838 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7839 IEM_MC_ARG(uint32_t, u32Dst, 0);
7840 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7841 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7842 IEM_MC_ADVANCE_RIP();
7843 IEM_MC_END();
7844 }
7845 return VINF_SUCCESS;
7846}
7847
7848
7849/** Opcode 0xf3 0x0f 0xae 11b/1. */
7850FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7851{
7852 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7854 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7855 {
7856 IEM_MC_BEGIN(1, 0);
7857 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7858 IEM_MC_ARG(uint64_t, u64Dst, 0);
7859 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7860 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
7861 IEM_MC_ADVANCE_RIP();
7862 IEM_MC_END();
7863 }
7864 else
7865 {
7866 IEM_MC_BEGIN(1, 0);
7867 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7868 IEM_MC_ARG(uint32_t, u32Dst, 0);
7869 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7870 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
7871 IEM_MC_ADVANCE_RIP();
7872 IEM_MC_END();
7873 }
7874 return VINF_SUCCESS;
7875}
7876
7877
7878/** Opcode 0xf3 0x0f 0xae 11b/2. */
7879FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7880{
7881 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7883 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7884 {
7885 IEM_MC_BEGIN(1, 0);
7886 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7887 IEM_MC_ARG(uint64_t, u64Dst, 0);
7888 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
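 /* Only a 64-bit base can be non-canonical; the 32-bit form below
 * zero-extends the value and thus needs no #GP(0) check. */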
7889 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7890 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7891 IEM_MC_ADVANCE_RIP();
7892 IEM_MC_END();
7893 }
7894 else
7895 {
7896 IEM_MC_BEGIN(1, 0);
7897 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7898 IEM_MC_ARG(uint32_t, u32Dst, 0);
7899 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7900 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7901 IEM_MC_ADVANCE_RIP();
7902 IEM_MC_END();
7903 }
7904 return VINF_SUCCESS;
7905}
7906
7907
7908/** Opcode 0xf3 0x0f 0xae 11b/3. */
7909FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7910{
7911 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7913 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7914 {
7915 IEM_MC_BEGIN(1, 0);
7916 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7917 IEM_MC_ARG(uint64_t, u64Dst, 0);
7918 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7919 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7920 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7921 IEM_MC_ADVANCE_RIP();
7922 IEM_MC_END();
7923 }
7924 else
7925 {
7926 IEM_MC_BEGIN(1, 0);
7927 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7928 IEM_MC_ARG(uint32_t, u32Dst, 0);
7929 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7930 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7931 IEM_MC_ADVANCE_RIP();
7932 IEM_MC_END();
7933 }
7934 return VINF_SUCCESS;
7935}
7936
7937
7938/**
7939 * Group 15 jump table for register variant.
7940 */
7941IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7942{ /* pfx: none, 066h, 0f3h, 0f2h */
7943 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7944 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7945 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7946 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7947 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7948 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7949 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7950 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7951};
7952AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7953
7954
7955/**
7956 * Group 15 jump table for memory variant.
7957 */
7958IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7959{ /* pfx: none, 066h, 0f3h, 0f2h */
7960 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7961 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7962 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7963 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7964 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7965 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7966 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7967 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7968};
7969AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7970
7971
7972/** Opcode 0x0f 0xae. */
7973FNIEMOP_DEF(iemOp_Grp15)
7974{
7975 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
7976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
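 /* The tables above are indexed by modrm.reg (row) and the SIMD prefix
 * none/066h/0f3h/0f2h (column). */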
7977 if (IEM_IS_MODRM_REG_MODE(bRm))
7978 /* register, register */
7979 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7980 + pVCpu->iem.s.idxPrefix], bRm);
7981 /* memory, register */
7982 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7983 + pVCpu->iem.s.idxPrefix], bRm);
7984}
7985
7986
7987/** Opcode 0x0f 0xaf. */
7988FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7989{
7990 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7991 IEMOP_HLP_MIN_386();
7992 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7993 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
7994}
7995
7996
7997/** Opcode 0x0f 0xb0. */
7998FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7999{
8000 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
8001 IEMOP_HLP_MIN_486();
8002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8003
8004 if (IEM_IS_MODRM_REG_MODE(bRm))
8005 {
8006 IEMOP_HLP_DONE_DECODING();
8007 IEM_MC_BEGIN(4, 0);
8008 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8009 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8010 IEM_MC_ARG(uint8_t, u8Src, 2);
8011 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8012
8013 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8014 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8015 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
8016 IEM_MC_REF_EFLAGS(pEFlags);
8017 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8018 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8019 else
8020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8021
8022 IEM_MC_ADVANCE_RIP();
8023 IEM_MC_END();
8024 }
8025 else
8026 {
8027 IEM_MC_BEGIN(4, 3);
8028 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8029 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8030 IEM_MC_ARG(uint8_t, u8Src, 2);
8031 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8033 IEM_MC_LOCAL(uint8_t, u8Al);
8034
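 /* AL is worked on via a local copy: the worker updates it on a compare
 * mismatch and the (possibly unchanged) value is stored back into AL
 * once the memory operand has been committed. */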
8035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8036 IEMOP_HLP_DONE_DECODING();
8037 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8038 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8039 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
8040 IEM_MC_FETCH_EFLAGS(EFlags);
8041 IEM_MC_REF_LOCAL(pu8Al, u8Al);
8042 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8043 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8044 else
8045 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8046
8047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8048 IEM_MC_COMMIT_EFLAGS(EFlags);
8049 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
8050 IEM_MC_ADVANCE_RIP();
8051 IEM_MC_END();
8052 }
8053 return VINF_SUCCESS;
8054}
8055
8056/** Opcode 0x0f 0xb1. */
8057FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
8058{
8059 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
8060 IEMOP_HLP_MIN_486();
8061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8062
8063 if (IEM_IS_MODRM_REG_MODE(bRm))
8064 {
8065 IEMOP_HLP_DONE_DECODING();
8066 switch (pVCpu->iem.s.enmEffOpSize)
8067 {
8068 case IEMMODE_16BIT:
8069 IEM_MC_BEGIN(4, 0);
8070 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8071 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8072 IEM_MC_ARG(uint16_t, u16Src, 2);
8073 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8074
8075 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8076 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8077 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
8078 IEM_MC_REF_EFLAGS(pEFlags);
8079 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8080 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8081 else
8082 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8083
8084 IEM_MC_ADVANCE_RIP();
8085 IEM_MC_END();
8086 return VINF_SUCCESS;
8087
8088 case IEMMODE_32BIT:
8089 IEM_MC_BEGIN(4, 0);
8090 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8091 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8092 IEM_MC_ARG(uint32_t, u32Src, 2);
8093 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8094
8095 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8096 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8097 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
8098 IEM_MC_REF_EFLAGS(pEFlags);
8099 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8101 else
8102 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8103
8104 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
8105 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8106 IEM_MC_ADVANCE_RIP();
8107 IEM_MC_END();
8108 return VINF_SUCCESS;
8109
8110 case IEMMODE_64BIT:
8111 IEM_MC_BEGIN(4, 0);
8112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8113 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8114#ifdef RT_ARCH_X86
8115 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8116#else
8117 IEM_MC_ARG(uint64_t, u64Src, 2);
8118#endif
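 /* Note: on 32-bit (x86) hosts the assembly worker takes the 64-bit
 * source operand by reference, as passing it by value is awkward
 * there; on all other hosts it is passed by value. */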
8119 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8120
8121 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8122 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
8123 IEM_MC_REF_EFLAGS(pEFlags);
8124#ifdef RT_ARCH_X86
8125 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8126 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8127 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8128 else
8129 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8130#else
8131 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8134 else
8135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8136#endif
8137
8138 IEM_MC_ADVANCE_RIP();
8139 IEM_MC_END();
8140 return VINF_SUCCESS;
8141
8142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8143 }
8144 }
8145 else
8146 {
8147 switch (pVCpu->iem.s.enmEffOpSize)
8148 {
8149 case IEMMODE_16BIT:
8150 IEM_MC_BEGIN(4, 3);
8151 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8152 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8153 IEM_MC_ARG(uint16_t, u16Src, 2);
8154 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8156 IEM_MC_LOCAL(uint16_t, u16Ax);
8157
8158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8159 IEMOP_HLP_DONE_DECODING();
8160 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8161 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8162 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8163 IEM_MC_FETCH_EFLAGS(EFlags);
8164 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8166 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8167 else
8168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8169
8170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8171 IEM_MC_COMMIT_EFLAGS(EFlags);
8172 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8173 IEM_MC_ADVANCE_RIP();
8174 IEM_MC_END();
8175 return VINF_SUCCESS;
8176
8177 case IEMMODE_32BIT:
8178 IEM_MC_BEGIN(4, 3);
8179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8180 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8181 IEM_MC_ARG(uint32_t, u32Src, 2);
8182 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8184 IEM_MC_LOCAL(uint32_t, u32Eax);
8185
8186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8187 IEMOP_HLP_DONE_DECODING();
8188 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8189 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8190 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8191 IEM_MC_FETCH_EFLAGS(EFlags);
8192 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8193 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8194 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8195 else
8196 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8197
8198 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8199 IEM_MC_COMMIT_EFLAGS(EFlags);
8200 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8201 IEM_MC_ADVANCE_RIP();
8202 IEM_MC_END();
8203 return VINF_SUCCESS;
8204
8205 case IEMMODE_64BIT:
8206 IEM_MC_BEGIN(4, 3);
8207 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8208 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8209#ifdef RT_ARCH_X86
8210 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8211#else
8212 IEM_MC_ARG(uint64_t, u64Src, 2);
8213#endif
8214 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8216 IEM_MC_LOCAL(uint64_t, u64Rax);
8217
8218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8219 IEMOP_HLP_DONE_DECODING();
8220 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8221 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8222 IEM_MC_FETCH_EFLAGS(EFlags);
8223 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8224#ifdef RT_ARCH_X86
8225 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8226 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8227 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8228 else
8229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8230#else
8231 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8233 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8234 else
8235 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8236#endif
8237
8238 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8239 IEM_MC_COMMIT_EFLAGS(EFlags);
8240 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8241 IEM_MC_ADVANCE_RIP();
8242 IEM_MC_END();
8243 return VINF_SUCCESS;
8244
8245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8246 }
8247 }
8248}
8249
8250
8251FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8252{
8253 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8254 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
8255
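 /* The far pointer is laid out in memory with the offset first (2, 4 or 8
 * bytes depending on the operand size) followed by the 16-bit selector,
 * hence the selector fetches at displacement 2/4/8 below. */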
8256 switch (pVCpu->iem.s.enmEffOpSize)
8257 {
8258 case IEMMODE_16BIT:
8259 IEM_MC_BEGIN(5, 1);
8260 IEM_MC_ARG(uint16_t, uSel, 0);
8261 IEM_MC_ARG(uint16_t, offSeg, 1);
8262 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8263 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8264 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8265 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8268 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8269 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8270 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8271 IEM_MC_END();
8272 return VINF_SUCCESS;
8273
8274 case IEMMODE_32BIT:
8275 IEM_MC_BEGIN(5, 1);
8276 IEM_MC_ARG(uint16_t, uSel, 0);
8277 IEM_MC_ARG(uint32_t, offSeg, 1);
8278 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8279 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8280 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8281 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8284 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8285 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8286 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8287 IEM_MC_END();
8288 return VINF_SUCCESS;
8289
8290 case IEMMODE_64BIT:
8291 IEM_MC_BEGIN(5, 1);
8292 IEM_MC_ARG(uint16_t, uSel, 0);
8293 IEM_MC_ARG(uint64_t, offSeg, 1);
8294 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8295 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8296 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8297 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8300 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
8301 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8302 else
8303 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8304 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8305 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8306 IEM_MC_END();
8307 return VINF_SUCCESS;
8308
8309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8310 }
8311}
8312
8313
8314/** Opcode 0x0f 0xb2. */
8315FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8316{
8317 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8318 IEMOP_HLP_MIN_386();
8319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8320 if (IEM_IS_MODRM_REG_MODE(bRm))
8321 return IEMOP_RAISE_INVALID_OPCODE();
8322 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8323}
8324
8325
8326/** Opcode 0x0f 0xb3. */
8327FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8328{
8329 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8330 IEMOP_HLP_MIN_386();
8331 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8332}
8333
8334
8335/** Opcode 0x0f 0xb4. */
8336FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8337{
8338 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8339 IEMOP_HLP_MIN_386();
8340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8341 if (IEM_IS_MODRM_REG_MODE(bRm))
8342 return IEMOP_RAISE_INVALID_OPCODE();
8343 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8344}
8345
8346
8347/** Opcode 0x0f 0xb5. */
8348FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8349{
8350 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8351 IEMOP_HLP_MIN_386();
8352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8353 if (IEM_IS_MODRM_REG_MODE(bRm))
8354 return IEMOP_RAISE_INVALID_OPCODE();
8355 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8356}
8357
8358
8359/** Opcode 0x0f 0xb6. */
8360FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8361{
8362 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8363 IEMOP_HLP_MIN_386();
8364
8365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8366
8367 /*
8368 * If rm is denoting a register, no more instruction bytes.
8369 */
8370 if (IEM_IS_MODRM_REG_MODE(bRm))
8371 {
8372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8373 switch (pVCpu->iem.s.enmEffOpSize)
8374 {
8375 case IEMMODE_16BIT:
8376 IEM_MC_BEGIN(0, 1);
8377 IEM_MC_LOCAL(uint16_t, u16Value);
8378 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8379 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8380 IEM_MC_ADVANCE_RIP();
8381 IEM_MC_END();
8382 return VINF_SUCCESS;
8383
8384 case IEMMODE_32BIT:
8385 IEM_MC_BEGIN(0, 1);
8386 IEM_MC_LOCAL(uint32_t, u32Value);
8387 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8388 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8389 IEM_MC_ADVANCE_RIP();
8390 IEM_MC_END();
8391 return VINF_SUCCESS;
8392
8393 case IEMMODE_64BIT:
8394 IEM_MC_BEGIN(0, 1);
8395 IEM_MC_LOCAL(uint64_t, u64Value);
8396 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8397 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8398 IEM_MC_ADVANCE_RIP();
8399 IEM_MC_END();
8400 return VINF_SUCCESS;
8401
8402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8403 }
8404 }
8405 else
8406 {
8407 /*
8408 * We're loading a register from memory.
8409 */
8410 switch (pVCpu->iem.s.enmEffOpSize)
8411 {
8412 case IEMMODE_16BIT:
8413 IEM_MC_BEGIN(0, 2);
8414 IEM_MC_LOCAL(uint16_t, u16Value);
8415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8418 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8419 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8420 IEM_MC_ADVANCE_RIP();
8421 IEM_MC_END();
8422 return VINF_SUCCESS;
8423
8424 case IEMMODE_32BIT:
8425 IEM_MC_BEGIN(0, 2);
8426 IEM_MC_LOCAL(uint32_t, u32Value);
8427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8430 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8431 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8432 IEM_MC_ADVANCE_RIP();
8433 IEM_MC_END();
8434 return VINF_SUCCESS;
8435
8436 case IEMMODE_64BIT:
8437 IEM_MC_BEGIN(0, 2);
8438 IEM_MC_LOCAL(uint64_t, u64Value);
8439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8442 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8443 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8444 IEM_MC_ADVANCE_RIP();
8445 IEM_MC_END();
8446 return VINF_SUCCESS;
8447
8448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8449 }
8450 }
8451}
8452
8453
8454/** Opcode 0x0f 0xb7. */
8455FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
8456{
8457 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
8458 IEMOP_HLP_MIN_386();
8459
8460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8461
8462 /** @todo Not entirely sure how the operand size prefix is handled here,
8463 * assuming that it will be ignored. Would be nice to have a few
8464 * tests for this. */
8465 /*
8466 * If rm is denoting a register, no more instruction bytes.
8467 */
8468 if (IEM_IS_MODRM_REG_MODE(bRm))
8469 {
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8472 {
8473 IEM_MC_BEGIN(0, 1);
8474 IEM_MC_LOCAL(uint32_t, u32Value);
8475 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8476 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8477 IEM_MC_ADVANCE_RIP();
8478 IEM_MC_END();
8479 }
8480 else
8481 {
8482 IEM_MC_BEGIN(0, 1);
8483 IEM_MC_LOCAL(uint64_t, u64Value);
8484 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8485 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8486 IEM_MC_ADVANCE_RIP();
8487 IEM_MC_END();
8488 }
8489 }
8490 else
8491 {
8492 /*
8493 * We're loading a register from memory.
8494 */
8495 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8496 {
8497 IEM_MC_BEGIN(0, 2);
8498 IEM_MC_LOCAL(uint32_t, u32Value);
8499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8502 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8503 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8504 IEM_MC_ADVANCE_RIP();
8505 IEM_MC_END();
8506 }
8507 else
8508 {
8509 IEM_MC_BEGIN(0, 2);
8510 IEM_MC_LOCAL(uint64_t, u64Value);
8511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8514 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8515 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8516 IEM_MC_ADVANCE_RIP();
8517 IEM_MC_END();
8518 }
8519 }
8520 return VINF_SUCCESS;
8521}
8522
8523
8524/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
8525FNIEMOP_UD_STUB(iemOp_jmpe);
8526
8527
8528/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
8529FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
8530{
8531 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8532 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
8533 return iemOp_InvalidNeedRM(pVCpu);
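 /* The native assembly worker is used when the host CPU itself has
 * POPCNT; otherwise the C fallback is selected. */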
8534#ifndef TST_IEM_CHECK_MC
8535# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
8536 static const IEMOPBINSIZES s_Native =
8537 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
8538# endif
8539 static const IEMOPBINSIZES s_Fallback =
8540 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
8541#endif
8542 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
8543}
8544
8545
8546/**
8547 * @opcode 0xb9
8548 * @opinvalid intel-modrm
8549 * @optest ->
8550 */
8551FNIEMOP_DEF(iemOp_Grp10)
8552{
8553 /*
8554 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
8555 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
8556 */
8557 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
8558 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
8559 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
8560}
8561
8562
8563/** Opcode 0x0f 0xba. */
8564FNIEMOP_DEF(iemOp_Grp8)
8565{
8566 IEMOP_HLP_MIN_386();
8567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8568 PCIEMOPBINSIZES pImpl;
8569 switch (IEM_GET_MODRM_REG_8(bRm))
8570 {
8571 case 0: case 1: case 2: case 3:
8572 /* Both AMD and Intel want full modr/m decoding and imm8. */
8573 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
8574 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
8575 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
8576 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
8577 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
8578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8579 }
8580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8581
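 /* Unlike the Gv forms, the immediate bit offset never adjusts the
 * effective address; it is simply masked to the operand width
 * (0x0f/0x1f/0x3f) below. The trailing imm8 is accounted for via the
 * cbImm=1 argument to IEM_MC_CALC_RM_EFF_ADDR so that RIP-relative
 * addressing comes out right. */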
8582 if (IEM_IS_MODRM_REG_MODE(bRm))
8583 {
8584 /* register destination. */
8585 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8587
8588 switch (pVCpu->iem.s.enmEffOpSize)
8589 {
8590 case IEMMODE_16BIT:
8591 IEM_MC_BEGIN(3, 0);
8592 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8593 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
8594 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8595
8596 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8597 IEM_MC_REF_EFLAGS(pEFlags);
8598 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8599
8600 IEM_MC_ADVANCE_RIP();
8601 IEM_MC_END();
8602 return VINF_SUCCESS;
8603
8604 case IEMMODE_32BIT:
8605 IEM_MC_BEGIN(3, 0);
8606 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8607 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
8608 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8609
8610 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8611 IEM_MC_REF_EFLAGS(pEFlags);
8612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8613
8614 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8615 IEM_MC_ADVANCE_RIP();
8616 IEM_MC_END();
8617 return VINF_SUCCESS;
8618
8619 case IEMMODE_64BIT:
8620 IEM_MC_BEGIN(3, 0);
8621 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8622 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8623 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8624
8625 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8626 IEM_MC_REF_EFLAGS(pEFlags);
8627 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8628
8629 IEM_MC_ADVANCE_RIP();
8630 IEM_MC_END();
8631 return VINF_SUCCESS;
8632
8633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8634 }
8635 }
8636 else
8637 {
8638 /* memory destination. */
8639
8640 uint32_t fAccess;
8641 if (pImpl->pfnLockedU16)
8642 fAccess = IEM_ACCESS_DATA_RW;
8643 else /* BT */
8644 fAccess = IEM_ACCESS_DATA_R;
8645
8646 /** @todo test negative bit offsets! */
8647 switch (pVCpu->iem.s.enmEffOpSize)
8648 {
8649 case IEMMODE_16BIT:
8650 IEM_MC_BEGIN(3, 1);
8651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8652 IEM_MC_ARG(uint16_t, u16Src, 1);
8653 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8655
8656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8657 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8658 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
8659 if (pImpl->pfnLockedU16)
8660 IEMOP_HLP_DONE_DECODING();
8661 else
8662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8663 IEM_MC_FETCH_EFLAGS(EFlags);
8664 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8665 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8666 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8667 else
8668 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8669 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8670
8671 IEM_MC_COMMIT_EFLAGS(EFlags);
8672 IEM_MC_ADVANCE_RIP();
8673 IEM_MC_END();
8674 return VINF_SUCCESS;
8675
8676 case IEMMODE_32BIT:
8677 IEM_MC_BEGIN(3, 1);
8678 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8679 IEM_MC_ARG(uint32_t, u32Src, 1);
8680 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8682
8683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8684 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8685 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8686 if (pImpl->pfnLockedU16)
8687 IEMOP_HLP_DONE_DECODING();
8688 else
8689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8690 IEM_MC_FETCH_EFLAGS(EFlags);
8691 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8692 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8693 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8694 else
8695 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8696 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8697
8698 IEM_MC_COMMIT_EFLAGS(EFlags);
8699 IEM_MC_ADVANCE_RIP();
8700 IEM_MC_END();
8701 return VINF_SUCCESS;
8702
8703 case IEMMODE_64BIT:
8704 IEM_MC_BEGIN(3, 1);
8705 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8706 IEM_MC_ARG(uint64_t, u64Src, 1);
8707 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8709
8710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8711 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8712 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8713 if (pImpl->pfnLockedU16)
8714 IEMOP_HLP_DONE_DECODING();
8715 else
8716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8717 IEM_MC_FETCH_EFLAGS(EFlags);
8718 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8719 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8721 else
8722 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8723 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8724
8725 IEM_MC_COMMIT_EFLAGS(EFlags);
8726 IEM_MC_ADVANCE_RIP();
8727 IEM_MC_END();
8728 return VINF_SUCCESS;
8729
8730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8731 }
8732 }
8733}
8734
8735
8736/** Opcode 0x0f 0xbb. */
8737FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8738{
8739 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8740 IEMOP_HLP_MIN_386();
8741 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8742}
8743
8744
8745/**
8746 * Common worker for BSF and BSR instructions.
8747 *
8748 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8749 * the destination register, which means that for 32-bit operations the high
8750 * bits must be left alone.
8751 *
8752 * @param pImpl Pointer to the instruction implementation (assembly).
8753 */
8754FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8755{
8756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8757
8758 /*
8759 * If rm is denoting a register, no more instruction bytes.
8760 */
8761 if (IEM_IS_MODRM_REG_MODE(bRm))
8762 {
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 switch (pVCpu->iem.s.enmEffOpSize)
8765 {
8766 case IEMMODE_16BIT:
8767 IEM_MC_BEGIN(3, 0);
8768 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8769 IEM_MC_ARG(uint16_t, u16Src, 1);
8770 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8771
8772 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8773 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8774 IEM_MC_REF_EFLAGS(pEFlags);
8775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8776
8777 IEM_MC_ADVANCE_RIP();
8778 IEM_MC_END();
8779 break;
8780
8781 case IEMMODE_32BIT:
8782 IEM_MC_BEGIN(3, 0);
8783 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8784 IEM_MC_ARG(uint32_t, u32Src, 1);
8785 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8786
8787 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8788 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8789 IEM_MC_REF_EFLAGS(pEFlags);
8790 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
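 /* ZF set means the source was zero and the destination was left
 * untouched, so only clear the high dword when ZF is clear. */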
8791 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8792 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8793 IEM_MC_ENDIF();
8794 IEM_MC_ADVANCE_RIP();
8795 IEM_MC_END();
8796 break;
8797
8798 case IEMMODE_64BIT:
8799 IEM_MC_BEGIN(3, 0);
8800 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8801 IEM_MC_ARG(uint64_t, u64Src, 1);
8802 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8803
8804 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8805 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8806 IEM_MC_REF_EFLAGS(pEFlags);
8807 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8808
8809 IEM_MC_ADVANCE_RIP();
8810 IEM_MC_END();
8811 break;
8812 }
8813 }
8814 else
8815 {
8816 /*
8817 * We're accessing memory.
8818 */
8819 switch (pVCpu->iem.s.enmEffOpSize)
8820 {
8821 case IEMMODE_16BIT:
8822 IEM_MC_BEGIN(3, 1);
8823 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8824 IEM_MC_ARG(uint16_t, u16Src, 1);
8825 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8827
8828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8830 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8831 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8832 IEM_MC_REF_EFLAGS(pEFlags);
8833 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8834
8835 IEM_MC_ADVANCE_RIP();
8836 IEM_MC_END();
8837 break;
8838
8839 case IEMMODE_32BIT:
8840 IEM_MC_BEGIN(3, 1);
8841 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8842 IEM_MC_ARG(uint32_t, u32Src, 1);
8843 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8845
8846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8848 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8849 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8850 IEM_MC_REF_EFLAGS(pEFlags);
8851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8852
8853 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8854 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8855 IEM_MC_ENDIF();
8856 IEM_MC_ADVANCE_RIP();
8857 IEM_MC_END();
8858 break;
8859
8860 case IEMMODE_64BIT:
8861 IEM_MC_BEGIN(3, 1);
8862 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8863 IEM_MC_ARG(uint64_t, u64Src, 1);
8864 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8866
8867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8869 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8870 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8871 IEM_MC_REF_EFLAGS(pEFlags);
8872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8873
8874 IEM_MC_ADVANCE_RIP();
8875 IEM_MC_END();
8876 break;
8877 }
8878 }
8879 return VINF_SUCCESS;
8880}
8881
8882
8883/** Opcode 0x0f 0xbc. */
8884FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
8885{
8886 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
8887 IEMOP_HLP_MIN_386();
8888 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8889 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
8890}
8891
8892
8893/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
8894FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
8895{
8896 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8897 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
8898 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8899
8900#ifndef TST_IEM_CHECK_MC
8901 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
8902 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
8903 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
8904 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
8905 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
8906 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
8907 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
8908 {
8909 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
8910 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
8911 };
8912#endif
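 /* Two dimensional worker table: the row is selected by whether the host
 * has BMI1 (i.e. whether the native worker is usable at all), the column
 * by the guest CPU's EFLAGS flavour. Ditto for LZCNT below. */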
8913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8914 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8915 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8916}
8917
8918
8919/** Opcode 0x0f 0xbd. */
8920FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
8921{
8922 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8923 IEMOP_HLP_MIN_386();
8924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8925 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
8926}
8927
8928
8929/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8930FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
8931{
8932 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8933 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
8934 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8935
8936#ifndef TST_IEM_CHECK_MC
8937 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
8938 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
8939 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
8940 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
8941 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
8942 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
8943 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
8944 {
8945 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
8946 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
8947 };
8948#endif
8949 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8950 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8951 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8952}
8953
8954
8955
8956/** Opcode 0x0f 0xbe. */
8957FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8958{
8959 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8960 IEMOP_HLP_MIN_386();
8961
8962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8963
8964 /*
8965 * If rm is denoting a register, no more instruction bytes.
8966 */
8967 if (IEM_IS_MODRM_REG_MODE(bRm))
8968 {
8969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8970 switch (pVCpu->iem.s.enmEffOpSize)
8971 {
8972 case IEMMODE_16BIT:
8973 IEM_MC_BEGIN(0, 1);
8974 IEM_MC_LOCAL(uint16_t, u16Value);
8975 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8976 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8977 IEM_MC_ADVANCE_RIP();
8978 IEM_MC_END();
8979 return VINF_SUCCESS;
8980
8981 case IEMMODE_32BIT:
8982 IEM_MC_BEGIN(0, 1);
8983 IEM_MC_LOCAL(uint32_t, u32Value);
8984 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8985 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8986 IEM_MC_ADVANCE_RIP();
8987 IEM_MC_END();
8988 return VINF_SUCCESS;
8989
8990 case IEMMODE_64BIT:
8991 IEM_MC_BEGIN(0, 1);
8992 IEM_MC_LOCAL(uint64_t, u64Value);
8993 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8994 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8995 IEM_MC_ADVANCE_RIP();
8996 IEM_MC_END();
8997 return VINF_SUCCESS;
8998
8999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9000 }
9001 }
9002 else
9003 {
9004 /*
9005 * We're loading a register from memory.
9006 */
9007 switch (pVCpu->iem.s.enmEffOpSize)
9008 {
9009 case IEMMODE_16BIT:
9010 IEM_MC_BEGIN(0, 2);
9011 IEM_MC_LOCAL(uint16_t, u16Value);
9012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9015 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9016 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9017 IEM_MC_ADVANCE_RIP();
9018 IEM_MC_END();
9019 return VINF_SUCCESS;
9020
9021 case IEMMODE_32BIT:
9022 IEM_MC_BEGIN(0, 2);
9023 IEM_MC_LOCAL(uint32_t, u32Value);
9024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9027 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9028 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9029 IEM_MC_ADVANCE_RIP();
9030 IEM_MC_END();
9031 return VINF_SUCCESS;
9032
9033 case IEMMODE_64BIT:
9034 IEM_MC_BEGIN(0, 2);
9035 IEM_MC_LOCAL(uint64_t, u64Value);
9036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9039 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9040 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9041 IEM_MC_ADVANCE_RIP();
9042 IEM_MC_END();
9043 return VINF_SUCCESS;
9044
9045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9046 }
9047 }
9048}
9049
9050
9051/** Opcode 0x0f 0xbf. */
9052FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
9053{
9054 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
9055 IEMOP_HLP_MIN_386();
9056
9057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9058
9059 /** @todo Not entirely sure how the operand size prefix is handled here,
9060 * assuming that it will be ignored. Would be nice to have a few
9061 * tests for this. */
9062 /*
9063 * If rm is denoting a register, no more instruction bytes.
9064 */
9065 if (IEM_IS_MODRM_REG_MODE(bRm))
9066 {
9067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9068 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9069 {
9070 IEM_MC_BEGIN(0, 1);
9071 IEM_MC_LOCAL(uint32_t, u32Value);
9072 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9073 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9074 IEM_MC_ADVANCE_RIP();
9075 IEM_MC_END();
9076 }
9077 else
9078 {
9079 IEM_MC_BEGIN(0, 1);
9080 IEM_MC_LOCAL(uint64_t, u64Value);
9081 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9082 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9083 IEM_MC_ADVANCE_RIP();
9084 IEM_MC_END();
9085 }
9086 }
9087 else
9088 {
9089 /*
9090 * We're loading a register from memory.
9091 */
9092 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9093 {
9094 IEM_MC_BEGIN(0, 2);
9095 IEM_MC_LOCAL(uint32_t, u32Value);
9096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9099 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9100 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9101 IEM_MC_ADVANCE_RIP();
9102 IEM_MC_END();
9103 }
9104 else
9105 {
9106 IEM_MC_BEGIN(0, 2);
9107 IEM_MC_LOCAL(uint64_t, u64Value);
9108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9111 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9112 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9113 IEM_MC_ADVANCE_RIP();
9114 IEM_MC_END();
9115 }
9116 }
9117 return VINF_SUCCESS;
9118}
9119
9120
9121/** Opcode 0x0f 0xc0. */
9122FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9123{
9124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9125 IEMOP_HLP_MIN_486();
9126 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
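/* Operation sketch (per the SDM's XADD description, for reference only):
       temp <- dst + src;
       src  <- dst;     (the original destination value lands in Gb)
       dst  <- temp;    (the sum lands in Eb; EFLAGS are set as for ADD) */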
9127
9128 /*
9129 * If rm is denoting a register, no more instruction bytes.
9130 */
9131 if (IEM_IS_MODRM_REG_MODE(bRm))
9132 {
9133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9134
9135 IEM_MC_BEGIN(3, 0);
9136 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9137 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9138 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9139
9140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9141 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9142 IEM_MC_REF_EFLAGS(pEFlags);
9143 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9144
9145 IEM_MC_ADVANCE_RIP();
9146 IEM_MC_END();
9147 }
9148 else
9149 {
9150 /*
9151 * We're accessing memory.
9152 */
9153 IEM_MC_BEGIN(3, 3);
9154 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9155 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9156 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9157 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9159
9160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9161 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9162 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9163 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9164 IEM_MC_FETCH_EFLAGS(EFlags);
9165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9167 else
9168 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9169
9170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9171 IEM_MC_COMMIT_EFLAGS(EFlags);
9172 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9173 IEM_MC_ADVANCE_RIP();
9174 IEM_MC_END();
9175 return VINF_SUCCESS;
9176 }
9177 return VINF_SUCCESS;
9178}
9179
9180
9181/** Opcode 0x0f 0xc1. */
9182FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
9183{
9184 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
9185 IEMOP_HLP_MIN_486();
9186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9187
9188 /*
9189 * If rm is denoting a register, no more instruction bytes.
9190 */
9191 if (IEM_IS_MODRM_REG_MODE(bRm))
9192 {
9193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9194
9195 switch (pVCpu->iem.s.enmEffOpSize)
9196 {
9197 case IEMMODE_16BIT:
9198 IEM_MC_BEGIN(3, 0);
9199 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9200 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9201 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9202
9203 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9204 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9205 IEM_MC_REF_EFLAGS(pEFlags);
9206 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9207
9208 IEM_MC_ADVANCE_RIP();
9209 IEM_MC_END();
9210 return VINF_SUCCESS;
9211
9212 case IEMMODE_32BIT:
9213 IEM_MC_BEGIN(3, 0);
9214 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9215 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9216 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9217
9218 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9219 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9220 IEM_MC_REF_EFLAGS(pEFlags);
9221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9222
9223 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9224 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9225 IEM_MC_ADVANCE_RIP();
9226 IEM_MC_END();
9227 return VINF_SUCCESS;
9228
9229 case IEMMODE_64BIT:
9230 IEM_MC_BEGIN(3, 0);
9231 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9232 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9234
9235 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9236 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9237 IEM_MC_REF_EFLAGS(pEFlags);
9238 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9239
9240 IEM_MC_ADVANCE_RIP();
9241 IEM_MC_END();
9242 return VINF_SUCCESS;
9243
9244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9245 }
9246 }
9247 else
9248 {
9249 /*
9250 * We're accessing memory.
9251 */
9252 switch (pVCpu->iem.s.enmEffOpSize)
9253 {
9254 case IEMMODE_16BIT:
9255 IEM_MC_BEGIN(3, 3);
9256 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9257 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9258 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9259 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9261
9262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9263 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9264 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9265 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9266 IEM_MC_FETCH_EFLAGS(EFlags);
9267 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9269 else
9270 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9271
9272 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9273 IEM_MC_COMMIT_EFLAGS(EFlags);
9274 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9275 IEM_MC_ADVANCE_RIP();
9276 IEM_MC_END();
9277 return VINF_SUCCESS;
9278
9279 case IEMMODE_32BIT:
9280 IEM_MC_BEGIN(3, 3);
9281 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9282 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9283 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9284 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9286
9287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9288 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9289 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9290 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9291 IEM_MC_FETCH_EFLAGS(EFlags);
9292 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9293 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9294 else
9295 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9296
9297 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9298 IEM_MC_COMMIT_EFLAGS(EFlags);
9299 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9300 IEM_MC_ADVANCE_RIP();
9301 IEM_MC_END();
9302 return VINF_SUCCESS;
9303
9304 case IEMMODE_64BIT:
9305 IEM_MC_BEGIN(3, 3);
9306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9307 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9308 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9309 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9311
9312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9313 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9314 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9315 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9316 IEM_MC_FETCH_EFLAGS(EFlags);
9317 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9318 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9319 else
9320 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9321
9322 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9323 IEM_MC_COMMIT_EFLAGS(EFlags);
9324 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9325 IEM_MC_ADVANCE_RIP();
9326 IEM_MC_END();
9327 return VINF_SUCCESS;
9328
9329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9330 }
9331 }
9332}
9333
9334
9335/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9336FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9337/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9338FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9339/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9340FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9341/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9342FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9343
9344
9345/** Opcode 0x0f 0xc3. */
9346FNIEMOP_DEF(iemOp_movnti_My_Gy)
9347{
9348 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9349
9350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9351
9352 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9353 if (IEM_IS_MODRM_MEM_MODE(bRm))
9354 {
9355 switch (pVCpu->iem.s.enmEffOpSize)
9356 {
9357 case IEMMODE_32BIT:
9358 IEM_MC_BEGIN(0, 2);
9359 IEM_MC_LOCAL(uint32_t, u32Value);
9360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9361
9362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9364 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9365 return IEMOP_RAISE_INVALID_OPCODE();
9366
9367 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9368 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9369 IEM_MC_ADVANCE_RIP();
9370 IEM_MC_END();
9371 break;
9372
9373 case IEMMODE_64BIT:
9374 IEM_MC_BEGIN(0, 2);
9375 IEM_MC_LOCAL(uint64_t, u64Value);
9376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9377
9378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9380 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9381 return IEMOP_RAISE_INVALID_OPCODE();
9382
9383 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9384 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9385 IEM_MC_ADVANCE_RIP();
9386 IEM_MC_END();
9387 break;
9388
9389 case IEMMODE_16BIT:
9390 /** @todo check this form. */
9391 return IEMOP_RAISE_INVALID_OPCODE();
9392 }
9393 }
9394 else
9395 return IEMOP_RAISE_INVALID_OPCODE();
9396 return VINF_SUCCESS;
9397}
9398/* Opcode 0x66 0x0f 0xc3 - invalid */
9399/* Opcode 0xf3 0x0f 0xc3 - invalid */
9400/* Opcode 0xf2 0x0f 0xc3 - invalid */
9401
9402/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9403FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9404/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9405FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9406/* Opcode 0xf3 0x0f 0xc4 - invalid */
9407/* Opcode 0xf2 0x0f 0xc4 - invalid */
9408
9409/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9410FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9411/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9412FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9413/* Opcode 0xf3 0x0f 0xc5 - invalid */
9414/* Opcode 0xf2 0x0f 0xc5 - invalid */
9415
9416/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9417FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9418/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9419FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9420/* Opcode 0xf3 0x0f 0xc6 - invalid */
9421/* Opcode 0xf2 0x0f 0xc6 - invalid */
9422
9423
9424/** Opcode 0x0f 0xc7 !11/1. */
9425FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9426{
9427 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
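/* Operation sketch (per the SDM's CMPXCHG8B description, for reference only):
       if (EDX:EAX == [mem64]) { ZF <- 1; [mem64]  <- ECX:EBX; }
       else                    { ZF <- 0; EDX:EAX  <- [mem64]; } */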
9428
9429 IEM_MC_BEGIN(4, 3);
9430 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9431 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9432 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9433 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9434 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9435 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9437
9438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9439 IEMOP_HLP_DONE_DECODING();
9440 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9441
9442 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9443 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9444 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9445
9446 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9447 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9448 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9449
9450 IEM_MC_FETCH_EFLAGS(EFlags);
9451 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9452 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9453 else
9454 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9455
9456 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9457 IEM_MC_COMMIT_EFLAGS(EFlags);
9458 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9459 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9460 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9461 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9462 IEM_MC_ENDIF();
9463 IEM_MC_ADVANCE_RIP();
9464
9465 IEM_MC_END();
9466 return VINF_SUCCESS;
9467}
9468
9469
9470/** Opcode REX.W 0x0f 0xc7 !11/1. */
9471FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9472{
9473 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
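/* Same operation as cmpxchg8b above, only on RDX:RAX / RCX:RBX and a 16-byte
   destination, which must be 16-byte aligned (#GP(0) otherwise). */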
9474 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9475 {
9476#if 0
9477 RT_NOREF(bRm);
9478 IEMOP_BITCH_ABOUT_STUB();
9479 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9480#else
9481 IEM_MC_BEGIN(4, 3);
9482 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9483 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9484 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9485 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9486 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9487 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9489
9490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9491 IEMOP_HLP_DONE_DECODING();
9492 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9493 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9494
9495 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9496 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9497 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9498
9499 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9500 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9501 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9502
9503 IEM_MC_FETCH_EFLAGS(EFlags);
9504# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9505# if defined(RT_ARCH_AMD64)
9506 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9507# endif
9508 {
9509 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9510 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9511 else
9512 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9513 }
9514# if defined(RT_ARCH_AMD64)
9515 else
9516# endif
9517# endif
9518# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9519 {
9520 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
9521 accesses that are not all atomic, which works fine in a uni-CPU guest
9522 configuration (ignoring DMA). If guest SMP is active we have no choice
9523 but to use a rendezvous callback here. Sigh. */
9524 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9525 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9526 else
9527 {
9528 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9529 /* We never get here; the tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9530 }
9531 }
9532# endif
9533
9534 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9535 IEM_MC_COMMIT_EFLAGS(EFlags);
9536 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9537 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9538 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9539 IEM_MC_ENDIF();
9540 IEM_MC_ADVANCE_RIP();
9541
9542 IEM_MC_END();
9543 return VINF_SUCCESS;
9544#endif
9545 }
9546 Log(("cmpxchg16b -> #UD\n"));
9547 return IEMOP_RAISE_INVALID_OPCODE();
9548}
9549
9550FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9551{
9552 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9553 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9554 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9555}
9556
9557/** Opcode 0x0f 0xc7 11/6. */
9558FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9559
9560/** Opcode 0x0f 0xc7 !11/6. */
9561#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9562FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9563{
9564 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9565 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9566 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9567 IEM_MC_BEGIN(2, 0);
9568 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9569 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9571 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9572 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9573 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9574 IEM_MC_END();
9575 return VINF_SUCCESS;
9576}
9577#else
9578FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9579#endif
9580
9581/** Opcode 0x66 0x0f 0xc7 !11/6. */
9582#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9583FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9584{
9585 IEMOP_MNEMONIC(vmclear, "vmclear");
9586 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9587 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9588 IEM_MC_BEGIN(2, 0);
9589 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9590 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9592 IEMOP_HLP_DONE_DECODING();
9593 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9594 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9595 IEM_MC_END();
9596 return VINF_SUCCESS;
9597}
9598#else
9599FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9600#endif
9601
9602/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9603#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9604FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9605{
9606 IEMOP_MNEMONIC(vmxon, "vmxon");
9607 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9608 IEM_MC_BEGIN(2, 0);
9609 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9610 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9612 IEMOP_HLP_DONE_DECODING();
9613 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9614 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9615 IEM_MC_END();
9616 return VINF_SUCCESS;
9617}
9618#else
9619FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9620#endif
9621
9622/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9623#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9624FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9625{
9626 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9627 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9628 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9629 IEM_MC_BEGIN(2, 0);
9630 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9633 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9634 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9635 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9636 IEM_MC_END();
9637 return VINF_SUCCESS;
9638}
9639#else
9640FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9641#endif
9642
9643/** Opcode 0x0f 0xc7 11/7. */
9644FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9645
9646
9647/**
9648 * Group 9 jump table for register variant.
9649 */
9650IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9651{ /* pfx: none, 066h, 0f3h, 0f2h */
9652 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9653 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9654 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9655 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9656 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9657 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9658 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9659 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9660};
9661AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9662
9663
9664/**
9665 * Group 9 jump table for memory variant.
9666 */
9667IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9668{ /* pfx: none, 066h, 0f3h, 0f2h */
9669 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9670 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9671 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9672 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9673 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9674 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9675 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9676 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9677};
9678AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
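/* Both tables are indexed by ModR/M.reg * 4 + prefix index, where the prefix
   index is 0 for none, 1 for 066h, 2 for 0f3h and 3 for 0f2h (see iemOp_Grp9 below). */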
9679
9680
9681/** Opcode 0x0f 0xc7. */
9682FNIEMOP_DEF(iemOp_Grp9)
9683{
9684 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9685 if (IEM_IS_MODRM_REG_MODE(bRm))
9686 /* register, register */
9687 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9688 + pVCpu->iem.s.idxPrefix], bRm);
9689 /* memory, register */
9690 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9691 + pVCpu->iem.s.idxPrefix], bRm);
9692}
9693
9694
9695/**
9696 * Common 'bswap register' helper.
9697 */
9698FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9699{
9700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9701 switch (pVCpu->iem.s.enmEffOpSize)
9702 {
9703 case IEMMODE_16BIT:
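/* Note! The SDM leaves BSWAP with a 16-bit operand undefined; whatever
   iemAImpl_bswap_u16 does with the low word is the behaviour we emulate. */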
9704 IEM_MC_BEGIN(1, 0);
9705 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9706 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9707 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9708 IEM_MC_ADVANCE_RIP();
9709 IEM_MC_END();
9710 return VINF_SUCCESS;
9711
9712 case IEMMODE_32BIT:
9713 IEM_MC_BEGIN(1, 0);
9714 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9715 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9716 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9717 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9718 IEM_MC_ADVANCE_RIP();
9719 IEM_MC_END();
9720 return VINF_SUCCESS;
9721
9722 case IEMMODE_64BIT:
9723 IEM_MC_BEGIN(1, 0);
9724 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9725 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9726 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9727 IEM_MC_ADVANCE_RIP();
9728 IEM_MC_END();
9729 return VINF_SUCCESS;
9730
9731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9732 }
9733}
9734
9735
9736/** Opcode 0x0f 0xc8. */
9737FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9738{
9739 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9740 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9741 prefix, but REX.B appears to be the correct prefix. For a parallel
9742 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
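/* For illustration (encodings assumed from the opcode tables): 0f c8 is
   'bswap eax', adding REX.B (41 0f c8) selects 'bswap r8d', and REX.W
   (48 0f c8) gives 'bswap rax'. */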
9743 IEMOP_HLP_MIN_486();
9744 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9745}
9746
9747
9748/** Opcode 0x0f 0xc9. */
9749FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9750{
9751 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9752 IEMOP_HLP_MIN_486();
9753 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9754}
9755
9756
9757/** Opcode 0x0f 0xca. */
9758FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9759{
9760 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9761 IEMOP_HLP_MIN_486();
9762 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9763}
9764
9765
9766/** Opcode 0x0f 0xcb. */
9767FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9768{
9769 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9770 IEMOP_HLP_MIN_486();
9771 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9772}
9773
9774
9775/** Opcode 0x0f 0xcc. */
9776FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9777{
9778 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9779 IEMOP_HLP_MIN_486();
9780 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9781}
9782
9783
9784/** Opcode 0x0f 0xcd. */
9785FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9786{
9787 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9788 IEMOP_HLP_MIN_486();
9789 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9790}
9791
9792
9793/** Opcode 0x0f 0xce. */
9794FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9795{
9796 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9797 IEMOP_HLP_MIN_486();
9798 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9799}
9800
9801
9802/** Opcode 0x0f 0xcf. */
9803FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9804{
9805 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9806 IEMOP_HLP_MIN_486();
9807 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9808}
9809
9810
9811/* Opcode 0x0f 0xd0 - invalid */
9812/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9813FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9814/* Opcode 0xf3 0x0f 0xd0 - invalid */
9815/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9816FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9817
9818/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9819FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
9820{
9821 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
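/* For reference: the whole 64-bit source operand is the shift count; counts
   above 15 zero all four word elements (SDM PSRLW). */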
9822 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
9823}
9824
9825/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
9826FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
9827{
9828 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9829 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
9830}
9831
9832/* Opcode 0xf3 0x0f 0xd1 - invalid */
9833/* Opcode 0xf2 0x0f 0xd1 - invalid */
9834
9835/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9836FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
9837{
9838 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9839 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
9840}
9841
9842
9843/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9844FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
9845{
9846 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9847 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
9848}
9849
9850
9851/* Opcode 0xf3 0x0f 0xd2 - invalid */
9852/* Opcode 0xf2 0x0f 0xd2 - invalid */
9853
9854/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9855FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
9856{
9857 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9858 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
9859}
9860
9861
9862/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9863FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
9864{
9865 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9866 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
9867}
9868
9869
9870/* Opcode 0xf3 0x0f 0xd3 - invalid */
9871/* Opcode 0xf2 0x0f 0xd3 - invalid */
9872
9873
9874/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9875FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
9876{
9877 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9878 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
9879}
9880
9881
9882/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
9883FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
9884{
9885 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9886 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
9887}
9888
9889
9890/* Opcode 0xf3 0x0f 0xd4 - invalid */
9891/* Opcode 0xf2 0x0f 0xd4 - invalid */
9892
9893/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9894FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
9895{
9896 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9897 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
9898}
9899
9900/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9901FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
9902{
9903 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9904 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
9905}
9906
9907
9908/* Opcode 0xf3 0x0f 0xd5 - invalid */
9909/* Opcode 0xf2 0x0f 0xd5 - invalid */
9910
9911/* Opcode 0x0f 0xd6 - invalid */
9912
9913/**
9914 * @opcode 0xd6
9915 * @oppfx 0x66
9916 * @opcpuid sse2
9917 * @opgroup og_sse2_pcksclr_datamove
9918 * @opxcpttype none
9919 * @optest op1=-1 op2=2 -> op1=2
9920 * @optest op1=0 op2=-42 -> op1=-42
9921 */
9922FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9923{
9924 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9926 if (IEM_IS_MODRM_REG_MODE(bRm))
9927 {
9928 /*
9929 * Register, register.
9930 */
9931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9932 IEM_MC_BEGIN(0, 2);
9933 IEM_MC_LOCAL(uint64_t, uSrc);
9934
9935 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9936 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9937
9938 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9939 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
9940
9941 IEM_MC_ADVANCE_RIP();
9942 IEM_MC_END();
9943 }
9944 else
9945 {
9946 /*
9947 * Memory, register.
9948 */
9949 IEM_MC_BEGIN(0, 2);
9950 IEM_MC_LOCAL(uint64_t, uSrc);
9951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9952
9953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9957
9958 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9959 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9960
9961 IEM_MC_ADVANCE_RIP();
9962 IEM_MC_END();
9963 }
9964 return VINF_SUCCESS;
9965}
9966
9967
9968/**
9969 * @opcode 0xd6
9970 * @opcodesub 11 mr/reg
9971 * @oppfx f3
9972 * @opcpuid sse2
9973 * @opgroup og_sse2_simdint_datamove
9974 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9975 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9976 */
9977FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9978{
9979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9980 if (IEM_IS_MODRM_REG_MODE(bRm))
9981 {
9982 /*
9983 * Register, register.
9984 */
9985 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9987 IEM_MC_BEGIN(0, 1);
9988 IEM_MC_LOCAL(uint64_t, uSrc);
9989
9990 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9991 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9992
9993 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
9994 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
9995 IEM_MC_FPU_TO_MMX_MODE();
9996
9997 IEM_MC_ADVANCE_RIP();
9998 IEM_MC_END();
9999 return VINF_SUCCESS;
10000 }
10001
10002 /**
10003 * @opdone
10004 * @opmnemonic udf30fd6mem
10005 * @opcode 0xd6
10006 * @opcodesub !11 mr/reg
10007 * @oppfx f3
10008 * @opunused intel-modrm
10009 * @opcpuid sse
10010 * @optest ->
10011 */
10012 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10013}
10014
10015
10016/**
10017 * @opcode 0xd6
10018 * @opcodesub 11 mr/reg
10019 * @oppfx f2
10020 * @opcpuid sse2
10021 * @opgroup og_sse2_simdint_datamove
10022 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10023 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10024 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10025 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10026 * @optest op1=-42 op2=0xfedcba9876543210
10027 * -> op1=0xfedcba9876543210 ftw=0xff
10028 */
10029FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10030{
10031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10032 if (IEM_IS_MODRM_REG_MODE(bRm))
10033 {
10034 /*
10035 * Register, register.
10036 */
10037 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10039 IEM_MC_BEGIN(0, 1);
10040 IEM_MC_LOCAL(uint64_t, uSrc);
10041
10042 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10043 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10044
10045 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10046 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10047 IEM_MC_FPU_TO_MMX_MODE();
10048
10049 IEM_MC_ADVANCE_RIP();
10050 IEM_MC_END();
10051 return VINF_SUCCESS;
10052 }
10053
10054 /**
10055 * @opdone
10056 * @opmnemonic udf20fd6mem
10057 * @opcode 0xd6
10058 * @opcodesub !11 mr/reg
10059 * @oppfx f2
10060 * @opunused intel-modrm
10061 * @opcpuid sse
10062 * @optest ->
10063 */
10064 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10065}
10066
10067
10068/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
10069FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
10070{
10071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10072 /* The docs say register only. */
10073 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10074 {
10075 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10076 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
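/* Operation sketch (for reference): bit n of the destination is the most
   significant bit of source byte n (n = 0..7); all higher bits are cleared. */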
10077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10078 IEM_MC_BEGIN(2, 0);
10079 IEM_MC_ARG(uint64_t *, puDst, 0);
10080 IEM_MC_ARG(uint64_t const *, puSrc, 1);
10081 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
10082 IEM_MC_PREPARE_FPU_USAGE();
10083 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
10084 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
10085 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
10086 IEM_MC_FPU_TO_MMX_MODE();
10087 IEM_MC_ADVANCE_RIP();
10088 IEM_MC_END();
10089 return VINF_SUCCESS;
10090 }
10091 return IEMOP_RAISE_INVALID_OPCODE();
10092}
10093
10094
10095 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
10096FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
10097{
10098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10099 /* The docs say register only. */
10100 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10101 {
10102 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10103 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
10104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10105 IEM_MC_BEGIN(2, 0);
10106 IEM_MC_ARG(uint64_t *, puDst, 0);
10107 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
10108 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10109 IEM_MC_PREPARE_SSE_USAGE();
10110 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10111 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10112 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
10113 IEM_MC_ADVANCE_RIP();
10114 IEM_MC_END();
10115 return VINF_SUCCESS;
10116 }
10117 return IEMOP_RAISE_INVALID_OPCODE();
10118}
10119
10120
10121/* Opcode 0xf3 0x0f 0xd7 - invalid */
10122/* Opcode 0xf2 0x0f 0xd7 - invalid */
10123
10124
10125/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
10126FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
10127{
10128 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10129 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
10130}
10131
10132
10133/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
10134FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
10135{
10136 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10137 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
10138}
10139
10140
10141/* Opcode 0xf3 0x0f 0xd8 - invalid */
10142/* Opcode 0xf2 0x0f 0xd8 - invalid */
10143
10144/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
10145FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
10146{
10147 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10148 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
10149}
10150
10151
10152/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
10153FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
10154{
10155 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10156 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
10157}
10158
10159
10160/* Opcode 0xf3 0x0f 0xd9 - invalid */
10161/* Opcode 0xf2 0x0f 0xd9 - invalid */
10162
10163/** Opcode 0x0f 0xda - pminub Pq, Qq */
10164FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
10165{
10166 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10167 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
10168}
10169
10170
10171/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
10172FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
10173{
10174 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10175 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
10176}
10177
10178/* Opcode 0xf3 0x0f 0xda - invalid */
10179/* Opcode 0xf2 0x0f 0xda - invalid */
10180
10181/** Opcode 0x0f 0xdb - pand Pq, Qq */
10182FNIEMOP_DEF(iemOp_pand_Pq_Qq)
10183{
10184 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10185 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
10186}
10187
10188
10189/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
10190FNIEMOP_DEF(iemOp_pand_Vx_Wx)
10191{
10192 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10193 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
10194}
10195
10196
10197/* Opcode 0xf3 0x0f 0xdb - invalid */
10198/* Opcode 0xf2 0x0f 0xdb - invalid */
10199
10200/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
10201FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
10202{
10203 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10204 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
10205}
10206
10207
10208/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10209FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10210{
10211 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10212 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10213}
10214
10215
10216/* Opcode 0xf3 0x0f 0xdc - invalid */
10217/* Opcode 0xf2 0x0f 0xdc - invalid */
10218
10219/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10220FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10221{
10222 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10223 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10224}
10225
10226
10227/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
10228FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
10229{
10230 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10231 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
10232}
10233
10234
10235/* Opcode 0xf3 0x0f 0xdd - invalid */
10236/* Opcode 0xf2 0x0f 0xdd - invalid */
10237
10238/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
10239FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
10240{
10241 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10242 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
10243}
10244
10245
10246 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
10247FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
10248{
10249 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10250 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
10251}
10252
10253/* Opcode 0xf3 0x0f 0xde - invalid */
10254/* Opcode 0xf2 0x0f 0xde - invalid */
10255
10256
10257/** Opcode 0x0f 0xdf - pandn Pq, Qq */
10258FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
10259{
10260 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10261 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
10262}
10263
10264
10265/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10266FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10267{
10268 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10269 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10270}
10271
10272
10273/* Opcode 0xf3 0x0f 0xdf - invalid */
10274/* Opcode 0xf2 0x0f 0xdf - invalid */
10275
10276/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10277FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
10278{
10279 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
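/* For reference: each unsigned byte pair is averaged with rounding,
   dst.byte[n] <- (dst.byte[n] + src.byte[n] + 1) >> 1. */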
10280 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
10281}
10282
10283
10284/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10285FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
10286{
10287 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10288 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
10289}
10290
10291
10292/* Opcode 0xf3 0x0f 0xe0 - invalid */
10293/* Opcode 0xf2 0x0f 0xe0 - invalid */
10294
10295/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10296FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10297{
10298 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10299 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10300}
10301
10302
10303/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10304FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10305{
10306 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10307 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10308}
10309
10310
10311/* Opcode 0xf3 0x0f 0xe1 - invalid */
10312/* Opcode 0xf2 0x0f 0xe1 - invalid */
10313
10314/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10315FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10316{
10317 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10318 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10319}
10320
10321
10322/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10323FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10324{
10325 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10326 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10327}
10328
10329
10330/* Opcode 0xf3 0x0f 0xe2 - invalid */
10331/* Opcode 0xf2 0x0f 0xe2 - invalid */
10332
10333/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10334FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
10335{
10336 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10337 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
10338}
10339
10340
10341/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10342FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
10343{
10344 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10345 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
10346}
10347
10348
10349/* Opcode 0xf3 0x0f 0xe3 - invalid */
10350/* Opcode 0xf2 0x0f 0xe3 - invalid */
10351
10352/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10353FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
10354{
10355 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10356 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
10357}
10358
10359
10360/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10361FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
10362{
10363 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10364 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
10365}
10366
10367
10368/* Opcode 0xf3 0x0f 0xe4 - invalid */
10369/* Opcode 0xf2 0x0f 0xe4 - invalid */
10370
10371/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10372FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10373{
10374 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10375 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10376}
10377
10378
10379/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10380FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10381{
10382 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10383 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10384}
10385
10386
10387/* Opcode 0xf3 0x0f 0xe5 - invalid */
10388/* Opcode 0xf2 0x0f 0xe5 - invalid */
10389
10390/* Opcode 0x0f 0xe6 - invalid */
10391/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10392FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10393/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10394FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10395/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10396FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10397
10398
10399/**
10400 * @opcode 0xe7
10401 * @opcodesub !11 mr/reg
10402 * @oppfx none
10403 * @opcpuid sse
10404 * @opgroup og_sse1_cachect
10405 * @opxcpttype none
10406 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10407 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10408 */
10409FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10410{
10411 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
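/* Note: the non-temporal hint only concerns caching, so a plain 64-bit store
   (below) should be all the emulation that is needed. */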
10412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10413 if (IEM_IS_MODRM_MEM_MODE(bRm))
10414 {
10415 /* Register, memory. */
10416 IEM_MC_BEGIN(0, 2);
10417 IEM_MC_LOCAL(uint64_t, uSrc);
10418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10419
10420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10422 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10423 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10424
10425 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10426 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10427 IEM_MC_FPU_TO_MMX_MODE();
10428
10429 IEM_MC_ADVANCE_RIP();
10430 IEM_MC_END();
10431 return VINF_SUCCESS;
10432 }
10433 /**
10434 * @opdone
10435 * @opmnemonic ud0fe7reg
10436 * @opcode 0xe7
10437 * @opcodesub 11 mr/reg
10438 * @oppfx none
10439 * @opunused immediate
10440 * @opcpuid sse
10441 * @optest ->
10442 */
10443 return IEMOP_RAISE_INVALID_OPCODE();
10444}
10445
10446/**
10447 * @opcode 0xe7
10448 * @opcodesub !11 mr/reg
10449 * @oppfx 0x66
10450 * @opcpuid sse2
10451 * @opgroup og_sse2_cachect
10452 * @opxcpttype 1
10453 * @optest op1=-1 op2=2 -> op1=2
10454 * @optest op1=0 op2=-42 -> op1=-42
10455 */
10456FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10457{
10458 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10460 if (IEM_IS_MODRM_MEM_MODE(bRm))
10461 {
10462 /* Register, memory. */
10463 IEM_MC_BEGIN(0, 2);
10464 IEM_MC_LOCAL(RTUINT128U, uSrc);
10465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10466
10467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10469 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10470 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10471
10472 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10473 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10474
10475 IEM_MC_ADVANCE_RIP();
10476 IEM_MC_END();
10477 return VINF_SUCCESS;
10478 }
10479
10480 /**
10481 * @opdone
10482 * @opmnemonic ud660fe7reg
10483 * @opcode 0xe7
10484 * @opcodesub 11 mr/reg
10485 * @oppfx 0x66
10486 * @opunused immediate
10487 * @opcpuid sse
10488 * @optest ->
10489 */
10490 return IEMOP_RAISE_INVALID_OPCODE();
10491}
10492
10493/* Opcode 0xf3 0x0f 0xe7 - invalid */
10494/* Opcode 0xf2 0x0f 0xe7 - invalid */
10495
10496
10497/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10498FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10499{
10500 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10501 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10502}
10503
10504
10505/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10506FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10507{
10508 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10509 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10510}
10511
10512
10513/* Opcode 0xf3 0x0f 0xe8 - invalid */
10514/* Opcode 0xf2 0x0f 0xe8 - invalid */
10515
10516/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10517FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10518{
10519 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10520 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10521}
10522
10523
10524/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10525FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10526{
10527 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10528 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10529}
10530
10531
10532/* Opcode 0xf3 0x0f 0xe9 - invalid */
10533/* Opcode 0xf2 0x0f 0xe9 - invalid */
10534
10535
10536/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10537FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
10538{
10539 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10540 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
10541}
10542
10543
10544/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10545FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
10546{
10547 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10548 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
10549}
10550
10551
10552/* Opcode 0xf3 0x0f 0xea - invalid */
10553/* Opcode 0xf2 0x0f 0xea - invalid */
10554
10555
10556/** Opcode 0x0f 0xeb - por Pq, Qq */
10557FNIEMOP_DEF(iemOp_por_Pq_Qq)
10558{
10559 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10560 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
10561}
10562
10563
10564/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
10565FNIEMOP_DEF(iemOp_por_Vx_Wx)
10566{
10567 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10568 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
10569}
10570
10571
10572/* Opcode 0xf3 0x0f 0xeb - invalid */
10573/* Opcode 0xf2 0x0f 0xeb - invalid */
10574
10575/** Opcode 0x0f 0xec - paddsb Pq, Qq */
10576FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
10577{
10578 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10579 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
10580}
10581
10582
10583/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
10584FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
10585{
10586 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10587 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
10588}
10589
10590
10591/* Opcode 0xf3 0x0f 0xec - invalid */
10592/* Opcode 0xf2 0x0f 0xec - invalid */
10593
10594/** Opcode 0x0f 0xed - paddsw Pq, Qq */
10595FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
10596{
10597 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10598 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
10599}
10600
10601
10602/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
10603FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
10604{
10605 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10606 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
10607}
10608
10609
10610/* Opcode 0xf3 0x0f 0xed - invalid */
10611/* Opcode 0xf2 0x0f 0xed - invalid */
10612
10613
10614/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
10615FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
10616{
10617 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
10619}
10620
10621
10622/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
10623FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
10624{
10625 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10626 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
10627}
10628
10629
10630/* Opcode 0xf3 0x0f 0xee - invalid */
10631/* Opcode 0xf2 0x0f 0xee - invalid */
10632
10633
10634/** Opcode 0x0f 0xef - pxor Pq, Qq */
10635FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
10636{
10637 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10638 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
10639}
10640
10641
10642/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
10643FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
10644{
10645 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10646 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
10647}
10648
10649
10650/* Opcode 0xf3 0x0f 0xef - invalid */
10651/* Opcode 0xf2 0x0f 0xef - invalid */
10652
10653/* Opcode 0x0f 0xf0 - invalid */
10654/* Opcode 0x66 0x0f 0xf0 - invalid */
10655/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
10656FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
10657
10658
10659/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
10660FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
10661{
10662 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10663 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
10664}
10665
10666
10667/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
10668FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
10669{
10670 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10671 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
10672}
10673
10674
10675/* Opcode 0xf2 0x0f 0xf1 - invalid */
10676
10677/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
10678FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
10679{
10680 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10681 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
10682}
10683
10684
10685/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
10686FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
10687{
10688 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10689 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
10690}
10691
10692
10693/* Opcode 0xf2 0x0f 0xf2 - invalid */
10694
10695/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
10696FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
10697{
10698 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10699 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
10700}
10701
10702
10703/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
10704FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
10705{
10706 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10707 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
10708}
10709
10710/* Opcode 0xf2 0x0f 0xf3 - invalid */
10711
10712/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10713FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
10714{
10715 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
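/* For reference: multiplies the low unsigned dwords into a 64-bit result,
   dst.uq <- (uint64_t)dst.dw[0] * src.dw[0]. */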
10716 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
10717}
10718
10719
10720 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
10721FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
10722{
10723 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10724 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
10725}
10726
10727
10728/* Opcode 0xf2 0x0f 0xf4 - invalid */
10729
10730/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
10731FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
10732{
10733 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
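/* For reference: multiplies the four signed word pairs and sums adjacent
   products, dst.dw[n] <- dst.w[2n] * src.w[2n] + dst.w[2n+1] * src.w[2n+1]. */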
10734 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
10735}
10736
10737
10738/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
10739FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
10740{
10741 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10742 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
10743}
10744
10745/* Opcode 0xf2 0x0f 0xf5 - invalid */
10746
10747/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
10748FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
10749{
10750 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
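/* For reference: sums the absolute differences of the eight unsigned byte
   pairs into the low word of the destination, zeroing the upper words. */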
10751 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
10752}
10753
10754
10755/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
10756FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
10757{
10758 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10759 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
10760}
10761
10762
10763/* Opcode 0xf2 0x0f 0xf6 - invalid */
10764
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
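    /* Note: PSUBQ on MMX registers was only added with SSE2, which is why the
       _Ex worker below takes an explicit fSse2 feature-flag argument. */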
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
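    /* Intel CPUs fetch and decode a ModR/M byte (and any memory operand
       displacement) for UD0 before raising #UD; AMD CPUs do not, which is
       what the vendor check below models. */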
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
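 *
 * @remarks Each opcode has four consecutive entries, one per SIMD prefix
 *          column: no prefix, 0x66, 0xF3 and 0xF2 (in that order); hence the
 *          1024 entry count asserted below.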
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
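
/*
 * Illustrative sketch only (kept out of the build): one way a two-byte escape
 * decoder could consume the table above.  The indexing follows the column
 * comment at the top of the table: opcode byte times four, plus the column
 * for the active SIMD prefix (0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2).  The
 * idxPrefix field name is an assumption standing in for however the decoder
 * tracks the last SIMD prefix; treat this as a sketch, not the actual
 * dispatcher.
 */
#if 0
FNIEMOP_DEF(iemOp_TwoByteEscape_Sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); /* the byte following 0x0f */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif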

/** @} */