VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@ 96104

Last change on this file since 96104 was 96104, checked in by vboxsync, 3 years ago

VMM/IEM: Implement [v]pmuludq instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 174.5 KB
Line 
1/* $Id: IEMAllInstructionsVexMap1.cpp.h 96104 2022-08-08 09:10:25Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX2 instructions on the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if (IEM_IS_MODRM_REG_MODE(bRm))
37 {
38 /*
39 * Register, register.
40 */
41 if (pVCpu->iem.s.uVexLength)
42 {
43 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
64 IEM_MC_BEGIN(4, 0);
65 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
66 IEM_MC_ARG(PRTUINT128U, puDst, 1);
67 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
68 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
69 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
70 IEM_MC_PREPARE_AVX_USAGE();
71 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
72 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
73 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
74 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
75 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_ADVANCE_RIP();
77 IEM_MC_END();
78 }
79 }
80 else
81 {
82 /*
83 * Register, memory.
84 */
85 if (pVCpu->iem.s.uVexLength)
86 {
87 IEM_MC_BEGIN(4, 4);
88 IEM_MC_LOCAL(RTUINT256U, uDst);
89 IEM_MC_LOCAL(RTUINT256U, uSrc1);
90 IEM_MC_LOCAL(RTUINT256U, uSrc2);
91 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
92 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
93 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
95 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
96
97 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
98 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
99 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
100 IEM_MC_PREPARE_AVX_USAGE();
101
102 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
103 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
104 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
105 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
106
107 IEM_MC_ADVANCE_RIP();
108 IEM_MC_END();
109 }
110 else
111 {
112 IEM_MC_BEGIN(4, 2);
113 IEM_MC_LOCAL(RTUINT128U, uSrc2);
114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
115 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
116 IEM_MC_ARG(PRTUINT128U, puDst, 1);
117 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
118 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
119
120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
121 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
122 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
123 IEM_MC_PREPARE_AVX_USAGE();
124
125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
126 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
127 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
128 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
129 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
130
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 }
134 }
135 return VINF_SUCCESS;
136}
137
138
139/**
140 * Common worker for AVX2 instructions on the forms:
141 * - vpxxx xmm0, xmm1, xmm2/mem128
142 * - vpxxx ymm0, ymm1, ymm2/mem256
143 *
144 * Takes function table for function w/o implicit state parameter.
145 *
146 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
147 */
148FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
149{
150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
151 if (IEM_IS_MODRM_REG_MODE(bRm))
152 {
153 /*
154 * Register, register.
155 */
156 if (pVCpu->iem.s.uVexLength)
157 {
158 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
159 IEM_MC_BEGIN(3, 3);
160 IEM_MC_LOCAL(RTUINT256U, uDst);
161 IEM_MC_LOCAL(RTUINT256U, uSrc1);
162 IEM_MC_LOCAL(RTUINT256U, uSrc2);
163 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
164 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
165 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
166 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
167 IEM_MC_PREPARE_AVX_USAGE();
168 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
169 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
171 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
178 IEM_MC_BEGIN(3, 0);
179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
180 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
181 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
182 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
185 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
186 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
188 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
189 IEM_MC_ADVANCE_RIP();
190 IEM_MC_END();
191 }
192 }
193 else
194 {
195 /*
196 * Register, memory.
197 */
198 if (pVCpu->iem.s.uVexLength)
199 {
200 IEM_MC_BEGIN(3, 4);
201 IEM_MC_LOCAL(RTUINT256U, uDst);
202 IEM_MC_LOCAL(RTUINT256U, uSrc1);
203 IEM_MC_LOCAL(RTUINT256U, uSrc2);
204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
205 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
206 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
207 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
208
209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
210 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
211 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
212 IEM_MC_PREPARE_AVX_USAGE();
213
214 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
215 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
217 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
218
219 IEM_MC_ADVANCE_RIP();
220 IEM_MC_END();
221 }
222 else
223 {
224 IEM_MC_BEGIN(3, 2);
225 IEM_MC_LOCAL(RTUINT128U, uSrc2);
226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
227 IEM_MC_ARG(PRTUINT128U, puDst, 0);
228 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
230
231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
232 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
233 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
234 IEM_MC_PREPARE_AVX_USAGE();
235
236 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
237 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
238 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
240 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
241
242 IEM_MC_ADVANCE_RIP();
243 IEM_MC_END();
244 }
245 }
246 return VINF_SUCCESS;
247}
248
249
250/**
251 * Common worker for AVX2 instructions on the forms:
252 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
253 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
254 *
255 * The 128-bit memory version of this instruction may elect to skip fetching the
256 * lower 64 bits of the operand. We, however, do not.
257 *
258 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
259 */
260FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
261{
262 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
263}
264
265
266/**
267 * Common worker for AVX2 instructions on the forms:
268 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
269 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
270 *
271 * The 128-bit memory version of this instruction may elect to skip fetching the
272 * higher 64 bits of the operand. We, however, do not.
273 *
274 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
275 */
276FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
277{
278 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
279}
280
281
282/**
283 * Common worker for AVX2 instructions on the forms:
284 * - vpxxx xmm0, xmm1/mem128
285 * - vpxxx ymm0, ymm1/mem256
286 *
287 * Takes function table for function w/o implicit state parameter.
288 *
289 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
290 */
291FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
292{
293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
294 if (IEM_IS_MODRM_REG_MODE(bRm))
295 {
296 /*
297 * Register, register.
298 */
299 if (pVCpu->iem.s.uVexLength)
300 {
301 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
302 IEM_MC_BEGIN(2, 2);
303 IEM_MC_LOCAL(RTUINT256U, uDst);
304 IEM_MC_LOCAL(RTUINT256U, uSrc);
305 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
306 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
307 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
308 IEM_MC_PREPARE_AVX_USAGE();
309 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
318 IEM_MC_BEGIN(2, 0);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
321 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
322 IEM_MC_PREPARE_AVX_USAGE();
323 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
324 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
325 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
326 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
327 IEM_MC_ADVANCE_RIP();
328 IEM_MC_END();
329 }
330 }
331 else
332 {
333 /*
334 * Register, memory.
335 */
336 if (pVCpu->iem.s.uVexLength)
337 {
338 IEM_MC_BEGIN(2, 3);
339 IEM_MC_LOCAL(RTUINT256U, uDst);
340 IEM_MC_LOCAL(RTUINT256U, uSrc);
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
342 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
343 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
344
345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
347 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
348 IEM_MC_PREPARE_AVX_USAGE();
349
350 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
351 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
352 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
353
354 IEM_MC_ADVANCE_RIP();
355 IEM_MC_END();
356 }
357 else
358 {
359 IEM_MC_BEGIN(2, 2);
360 IEM_MC_LOCAL(RTUINT128U, uSrc);
361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
362 IEM_MC_ARG(PRTUINT128U, puDst, 0);
363 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
364
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
366 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
367 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
368 IEM_MC_PREPARE_AVX_USAGE();
369
370 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
371 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
372 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
373 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
374
375 IEM_MC_ADVANCE_RIP();
376 IEM_MC_END();
377 }
378 }
379 return VINF_SUCCESS;
380}
381
382
383/* Opcode VEX.0F 0x00 - invalid */
384/* Opcode VEX.0F 0x01 - invalid */
385/* Opcode VEX.0F 0x02 - invalid */
386/* Opcode VEX.0F 0x03 - invalid */
387/* Opcode VEX.0F 0x04 - invalid */
388/* Opcode VEX.0F 0x05 - invalid */
389/* Opcode VEX.0F 0x06 - invalid */
390/* Opcode VEX.0F 0x07 - invalid */
391/* Opcode VEX.0F 0x08 - invalid */
392/* Opcode VEX.0F 0x09 - invalid */
393/* Opcode VEX.0F 0x0a - invalid */
394
395/** Opcode VEX.0F 0x0b. */
396FNIEMOP_DEF(iemOp_vud2)
397{
398 IEMOP_MNEMONIC(vud2, "vud2");
399 return IEMOP_RAISE_INVALID_OPCODE();
400}
401
402/* Opcode VEX.0F 0x0c - invalid */
403/* Opcode VEX.0F 0x0d - invalid */
404/* Opcode VEX.0F 0x0e - invalid */
405/* Opcode VEX.0F 0x0f - invalid */
406
407
408/**
409 * @opcode 0x10
410 * @oppfx none
411 * @opcpuid avx
412 * @opgroup og_avx_simdfp_datamove
413 * @opxcpttype 4UA
414 * @optest op1=1 op2=2 -> op1=2
415 * @optest op1=0 op2=-22 -> op1=-22
416 */
417FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
418{
419 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
420 Assert(pVCpu->iem.s.uVexLength <= 1);
421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
422 if (IEM_IS_MODRM_REG_MODE(bRm))
423 {
424 /*
425 * Register, register.
426 */
427 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
428 IEM_MC_BEGIN(0, 0);
429 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
430 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
431 if (pVCpu->iem.s.uVexLength == 0)
432 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
433 IEM_GET_MODRM_RM(pVCpu, bRm));
434 else
435 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
436 IEM_GET_MODRM_RM(pVCpu, bRm));
437 IEM_MC_ADVANCE_RIP();
438 IEM_MC_END();
439 }
440 else if (pVCpu->iem.s.uVexLength == 0)
441 {
442 /*
443 * 128-bit: Register, Memory
444 */
445 IEM_MC_BEGIN(0, 2);
446 IEM_MC_LOCAL(RTUINT128U, uSrc);
447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
448
449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
450 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
451 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
452 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
453
454 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
455 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
456
457 IEM_MC_ADVANCE_RIP();
458 IEM_MC_END();
459 }
460 else
461 {
462 /*
463 * 256-bit: Register, Memory
464 */
465 IEM_MC_BEGIN(0, 2);
466 IEM_MC_LOCAL(RTUINT256U, uSrc);
467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
468
469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
470 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
471 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
472 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
473
474 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
475 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
476
477 IEM_MC_ADVANCE_RIP();
478 IEM_MC_END();
479 }
480 return VINF_SUCCESS;
481}
482
483
484/**
485 * @opcode 0x10
486 * @oppfx 0x66
487 * @opcpuid avx
488 * @opgroup og_avx_simdfp_datamove
489 * @opxcpttype 4UA
490 * @optest op1=1 op2=2 -> op1=2
491 * @optest op1=0 op2=-22 -> op1=-22
492 */
493FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
494{
495 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
496 Assert(pVCpu->iem.s.uVexLength <= 1);
497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
498 if (IEM_IS_MODRM_REG_MODE(bRm))
499 {
500 /*
501 * Register, register.
502 */
503 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
504 IEM_MC_BEGIN(0, 0);
505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
506 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
507 if (pVCpu->iem.s.uVexLength == 0)
508 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
509 IEM_GET_MODRM_RM(pVCpu, bRm));
510 else
511 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
512 IEM_GET_MODRM_RM(pVCpu, bRm));
513 IEM_MC_ADVANCE_RIP();
514 IEM_MC_END();
515 }
516 else if (pVCpu->iem.s.uVexLength == 0)
517 {
518 /*
519 * 128-bit: Memory, register.
520 */
521 IEM_MC_BEGIN(0, 2);
522 IEM_MC_LOCAL(RTUINT128U, uSrc);
523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
524
525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
526 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
527 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
528 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
529
530 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
531 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
532
533 IEM_MC_ADVANCE_RIP();
534 IEM_MC_END();
535 }
536 else
537 {
538 /*
539 * 256-bit: Memory, register.
540 */
541 IEM_MC_BEGIN(0, 2);
542 IEM_MC_LOCAL(RTUINT256U, uSrc);
543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
544
545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
549
550 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
551 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
552
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 }
556 return VINF_SUCCESS;
557}
558
559
560FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
561{
562 Assert(pVCpu->iem.s.uVexLength <= 1);
563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
564 if (IEM_IS_MODRM_REG_MODE(bRm))
565 {
566 /**
567 * @opcode 0x10
568 * @oppfx 0xf3
569 * @opcodesub 11 mr/reg
570 * @opcpuid avx
571 * @opgroup og_avx_simdfp_datamerge
572 * @opxcpttype 5
573 * @optest op1=1 op2=0 op3=2 -> op1=2
574 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
575 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
576 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
577 * @note HssHi refers to bits 127:32.
578 */
579 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
580 IEMOP_HLP_DONE_VEX_DECODING();
581 IEM_MC_BEGIN(0, 0);
582
583 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
584 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
585 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
586 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
587 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
588 IEM_MC_ADVANCE_RIP();
589 IEM_MC_END();
590 }
591 else
592 {
593 /**
594 * @opdone
595 * @opcode 0x10
596 * @oppfx 0xf3
597 * @opcodesub !11 mr/reg
598 * @opcpuid avx
599 * @opgroup og_avx_simdfp_datamove
600 * @opxcpttype 5
601 * @opfunction iemOp_vmovss_Vss_Hss_Wss
602 * @optest op1=1 op2=2 -> op1=2
603 * @optest op1=0 op2=-22 -> op1=-22
604 */
605 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
606 IEM_MC_BEGIN(0, 2);
607 IEM_MC_LOCAL(uint32_t, uSrc);
608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
609
610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
611 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
612 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
613 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
614
615 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
616 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
617
618 IEM_MC_ADVANCE_RIP();
619 IEM_MC_END();
620 }
621
622 return VINF_SUCCESS;
623}
624
625
626FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
627{
628 Assert(pVCpu->iem.s.uVexLength <= 1);
629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
630 if (IEM_IS_MODRM_REG_MODE(bRm))
631 {
632 /**
633 * @opcode 0x10
634 * @oppfx 0xf2
635 * @opcodesub 11 mr/reg
636 * @opcpuid avx
637 * @opgroup og_avx_simdfp_datamerge
638 * @opxcpttype 5
639 * @optest op1=1 op2=0 op3=2 -> op1=2
640 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
641 * @optest op1=3 op2=-1 op3=0x77 ->
642 * op1=0xffffffffffffffff0000000000000077
643 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
644 */
645 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
646 IEMOP_HLP_DONE_VEX_DECODING();
647 IEM_MC_BEGIN(0, 0);
648
649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
651 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
652 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
653 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
654 IEM_MC_ADVANCE_RIP();
655 IEM_MC_END();
656 }
657 else
658 {
659 /**
660 * @opdone
661 * @opcode 0x10
662 * @oppfx 0xf2
663 * @opcodesub !11 mr/reg
664 * @opcpuid avx
665 * @opgroup og_avx_simdfp_datamove
666 * @opxcpttype 5
667 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
668 * @optest op1=1 op2=2 -> op1=2
669 * @optest op1=0 op2=-22 -> op1=-22
670 */
671 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
672 IEM_MC_BEGIN(0, 2);
673 IEM_MC_LOCAL(uint64_t, uSrc);
674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
675
676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
677 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
679 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
680
681 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
682 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
683
684 IEM_MC_ADVANCE_RIP();
685 IEM_MC_END();
686 }
687
688 return VINF_SUCCESS;
689}
690
691
692/**
693 * @opcode 0x11
694 * @oppfx none
695 * @opcpuid avx
696 * @opgroup og_avx_simdfp_datamove
697 * @opxcpttype 4UA
698 * @optest op1=1 op2=2 -> op1=2
699 * @optest op1=0 op2=-22 -> op1=-22
700 */
701FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
702{
703 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
704 Assert(pVCpu->iem.s.uVexLength <= 1);
705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
706 if (IEM_IS_MODRM_REG_MODE(bRm))
707 {
708 /*
709 * Register, register.
710 */
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
712 IEM_MC_BEGIN(0, 0);
713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
715 if (pVCpu->iem.s.uVexLength == 0)
716 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
717 IEM_GET_MODRM_REG(pVCpu, bRm));
718 else
719 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
720 IEM_GET_MODRM_REG(pVCpu, bRm));
721 IEM_MC_ADVANCE_RIP();
722 IEM_MC_END();
723 }
724 else if (pVCpu->iem.s.uVexLength == 0)
725 {
726 /*
727 * 128-bit: Memory, register.
728 */
729 IEM_MC_BEGIN(0, 2);
730 IEM_MC_LOCAL(RTUINT128U, uSrc);
731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
732
733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
734 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
735 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
736 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
737
738 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
739 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
740
741 IEM_MC_ADVANCE_RIP();
742 IEM_MC_END();
743 }
744 else
745 {
746 /*
747 * 256-bit: Memory, register.
748 */
749 IEM_MC_BEGIN(0, 2);
750 IEM_MC_LOCAL(RTUINT256U, uSrc);
751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
752
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
754 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
755 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
756 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
757
758 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
759 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
760
761 IEM_MC_ADVANCE_RIP();
762 IEM_MC_END();
763 }
764 return VINF_SUCCESS;
765}
766
767
768/**
769 * @opcode 0x11
770 * @oppfx 0x66
771 * @opcpuid avx
772 * @opgroup og_avx_simdfp_datamove
773 * @opxcpttype 4UA
774 * @optest op1=1 op2=2 -> op1=2
775 * @optest op1=0 op2=-22 -> op1=-22
776 */
777FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
778{
779 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
780 Assert(pVCpu->iem.s.uVexLength <= 1);
781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
782 if (IEM_IS_MODRM_REG_MODE(bRm))
783 {
784 /*
785 * Register, register.
786 */
787 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
788 IEM_MC_BEGIN(0, 0);
789 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
790 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
791 if (pVCpu->iem.s.uVexLength == 0)
792 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
793 IEM_GET_MODRM_REG(pVCpu, bRm));
794 else
795 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
796 IEM_GET_MODRM_REG(pVCpu, bRm));
797 IEM_MC_ADVANCE_RIP();
798 IEM_MC_END();
799 }
800 else if (pVCpu->iem.s.uVexLength == 0)
801 {
802 /*
803 * 128-bit: Memory, register.
804 */
805 IEM_MC_BEGIN(0, 2);
806 IEM_MC_LOCAL(RTUINT128U, uSrc);
807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
808
809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
810 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
811 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
812 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
813
814 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
815 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
816
817 IEM_MC_ADVANCE_RIP();
818 IEM_MC_END();
819 }
820 else
821 {
822 /*
823 * 256-bit: Memory, register.
824 */
825 IEM_MC_BEGIN(0, 2);
826 IEM_MC_LOCAL(RTUINT256U, uSrc);
827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
828
829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
832 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
833
834 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
835 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
836
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 }
840 return VINF_SUCCESS;
841}
842
843
844FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
845{
846 Assert(pVCpu->iem.s.uVexLength <= 1);
847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
848 if (IEM_IS_MODRM_REG_MODE(bRm))
849 {
850 /**
851 * @opcode 0x11
852 * @oppfx 0xf3
853 * @opcodesub 11 mr/reg
854 * @opcpuid avx
855 * @opgroup og_avx_simdfp_datamerge
856 * @opxcpttype 5
857 * @optest op1=1 op2=0 op3=2 -> op1=2
858 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
859 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
860 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
861 */
862 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
863 IEMOP_HLP_DONE_VEX_DECODING();
864 IEM_MC_BEGIN(0, 0);
865
866 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
867 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
868 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
869 IEM_GET_MODRM_REG(pVCpu, bRm),
870 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
871 IEM_MC_ADVANCE_RIP();
872 IEM_MC_END();
873 }
874 else
875 {
876 /**
877 * @opdone
878 * @opcode 0x11
879 * @oppfx 0xf3
880 * @opcodesub !11 mr/reg
881 * @opcpuid avx
882 * @opgroup og_avx_simdfp_datamove
883 * @opxcpttype 5
884 * @opfunction iemOp_vmovss_Vss_Hss_Wss
885 * @optest op1=1 op2=2 -> op1=2
886 * @optest op1=0 op2=-22 -> op1=-22
887 */
888 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
889 IEM_MC_BEGIN(0, 2);
890 IEM_MC_LOCAL(uint32_t, uSrc);
891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
892
893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
894 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
895 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
896 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
897
898 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
899 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
900
901 IEM_MC_ADVANCE_RIP();
902 IEM_MC_END();
903 }
904
905 return VINF_SUCCESS;
906}
907
908
909FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
910{
911 Assert(pVCpu->iem.s.uVexLength <= 1);
912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
913 if (IEM_IS_MODRM_REG_MODE(bRm))
914 {
915 /**
916 * @opcode 0x11
917 * @oppfx 0xf2
918 * @opcodesub 11 mr/reg
919 * @opcpuid avx
920 * @opgroup og_avx_simdfp_datamerge
921 * @opxcpttype 5
922 * @optest op1=1 op2=0 op3=2 -> op1=2
923 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
924 * @optest op1=3 op2=-1 op3=0x77 ->
925 * op1=0xffffffffffffffff0000000000000077
926 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
927 */
928 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
929 IEMOP_HLP_DONE_VEX_DECODING();
930 IEM_MC_BEGIN(0, 0);
931
932 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
933 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
934 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
935 IEM_GET_MODRM_REG(pVCpu, bRm),
936 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
937 IEM_MC_ADVANCE_RIP();
938 IEM_MC_END();
939 }
940 else
941 {
942 /**
943 * @opdone
944 * @opcode 0x11
945 * @oppfx 0xf2
946 * @opcodesub !11 mr/reg
947 * @opcpuid avx
948 * @opgroup og_avx_simdfp_datamove
949 * @opxcpttype 5
950 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
951 * @optest op1=1 op2=2 -> op1=2
952 * @optest op1=0 op2=-22 -> op1=-22
953 */
954 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
955 IEM_MC_BEGIN(0, 2);
956 IEM_MC_LOCAL(uint64_t, uSrc);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958
959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
960 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
961 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
962 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
963
964 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
965 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
966
967 IEM_MC_ADVANCE_RIP();
968 IEM_MC_END();
969 }
970
971 return VINF_SUCCESS;
972}
973
974
975FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
976{
977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
978 if (IEM_IS_MODRM_REG_MODE(bRm))
979 {
980 /**
981 * @opcode 0x12
982 * @opcodesub 11 mr/reg
983 * @oppfx none
984 * @opcpuid avx
985 * @opgroup og_avx_simdfp_datamerge
986 * @opxcpttype 7LZ
987 * @optest op2=0x2200220122022203
988 * op3=0x3304330533063307
989 * -> op1=0x22002201220222033304330533063307
990 * @optest op2=-1 op3=-42 -> op1=-42
991 * @note op3 and op2 are only the 8-byte high XMM register halfs.
992 */
993 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
994
995 IEMOP_HLP_DONE_VEX_DECODING_L0();
996 IEM_MC_BEGIN(0, 0);
997
998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
999 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1000 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1001 IEM_GET_MODRM_RM(pVCpu, bRm),
1002 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1003
1004 IEM_MC_ADVANCE_RIP();
1005 IEM_MC_END();
1006 }
1007 else
1008 {
1009 /**
1010 * @opdone
1011 * @opcode 0x12
1012 * @opcodesub !11 mr/reg
1013 * @oppfx none
1014 * @opcpuid avx
1015 * @opgroup og_avx_simdfp_datamove
1016 * @opxcpttype 5LZ
1017 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1018 * @optest op1=1 op2=0 op3=0 -> op1=0
1019 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1020 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1021 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1022 */
1023 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1024
1025 IEM_MC_BEGIN(0, 2);
1026 IEM_MC_LOCAL(uint64_t, uSrc);
1027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1028
1029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1030 IEMOP_HLP_DONE_VEX_DECODING_L0();
1031 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1032 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1033
1034 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1035 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1036 uSrc,
1037 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1038
1039 IEM_MC_ADVANCE_RIP();
1040 IEM_MC_END();
1041 }
1042 return VINF_SUCCESS;
1043}
1044
1045
1046/**
1047 * @opcode 0x12
1048 * @opcodesub !11 mr/reg
1049 * @oppfx 0x66
1050 * @opcpuid avx
1051 * @opgroup og_avx_pcksclr_datamerge
1052 * @opxcpttype 5LZ
1053 * @optest op2=0 op3=2 -> op1=2
1054 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1055 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1056 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1057 */
1058FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1059{
1060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1061 if (IEM_IS_MODRM_MEM_MODE(bRm))
1062 {
1063 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1064
1065 IEM_MC_BEGIN(0, 2);
1066 IEM_MC_LOCAL(uint64_t, uSrc);
1067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1068
1069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1070 IEMOP_HLP_DONE_VEX_DECODING_L0();
1071 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1072 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1073
1074 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1075 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1076 uSrc,
1077 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1078
1079 IEM_MC_ADVANCE_RIP();
1080 IEM_MC_END();
1081 return VINF_SUCCESS;
1082 }
1083
1084 /**
1085 * @opdone
1086 * @opmnemonic udvex660f12m3
1087 * @opcode 0x12
1088 * @opcodesub 11 mr/reg
1089 * @oppfx 0x66
1090 * @opunused immediate
1091 * @opcpuid avx
1092 * @optest ->
1093 */
1094 return IEMOP_RAISE_INVALID_OPCODE();
1095}
1096
1097
1098/**
1099 * @opcode 0x12
1100 * @oppfx 0xf3
1101 * @opcpuid avx
1102 * @opgroup og_avx_pcksclr_datamove
1103 * @opxcpttype 4
1104 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1105 * -> op1=0x00000002000000020000000100000001
1106 * @optest vex.l==1 /
1107 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1108 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1109 */
1110FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1111{
1112 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1113 Assert(pVCpu->iem.s.uVexLength <= 1);
1114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1115 if (IEM_IS_MODRM_REG_MODE(bRm))
1116 {
1117 /*
1118 * Register, register.
1119 */
1120 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1121 if (pVCpu->iem.s.uVexLength == 0)
1122 {
1123 IEM_MC_BEGIN(2, 0);
1124 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1125 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1126
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_PREPARE_AVX_USAGE();
1129
1130 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1131 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1132 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1133 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1134
1135 IEM_MC_ADVANCE_RIP();
1136 IEM_MC_END();
1137 }
1138 else
1139 {
1140 IEM_MC_BEGIN(3, 0);
1141 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1142 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1143 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1144
1145 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1146 IEM_MC_PREPARE_AVX_USAGE();
1147 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1148
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 }
1152 }
1153 else
1154 {
1155 /*
1156 * Register, memory.
1157 */
1158 if (pVCpu->iem.s.uVexLength == 0)
1159 {
1160 IEM_MC_BEGIN(2, 2);
1161 IEM_MC_LOCAL(RTUINT128U, uSrc);
1162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1163 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1164 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1165
1166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1167 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1168 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1169 IEM_MC_PREPARE_AVX_USAGE();
1170
1171 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1172 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1173 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1174 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 IEM_MC_BEGIN(3, 2);
1182 IEM_MC_LOCAL(RTUINT256U, uSrc);
1183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1184 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1185 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1186 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1190 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1191 IEM_MC_PREPARE_AVX_USAGE();
1192
1193 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 }
1200 return VINF_SUCCESS;
1201}
1202
1203
1204/**
1205 * @opcode 0x12
1206 * @oppfx 0xf2
1207 * @opcpuid avx
1208 * @opgroup og_avx_pcksclr_datamove
1209 * @opxcpttype 5
1210 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1211 * -> op1=0x22222222111111112222222211111111
1212 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1213 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1214 */
1215FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1216{
1217 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1219 if (IEM_IS_MODRM_REG_MODE(bRm))
1220 {
1221 /*
1222 * Register, register.
1223 */
1224 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1225 if (pVCpu->iem.s.uVexLength == 0)
1226 {
1227 IEM_MC_BEGIN(2, 0);
1228 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1229 IEM_MC_ARG(uint64_t, uSrc, 1);
1230
1231 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1232 IEM_MC_PREPARE_AVX_USAGE();
1233
1234 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1235 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1236 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1237 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1238
1239 IEM_MC_ADVANCE_RIP();
1240 IEM_MC_END();
1241 }
1242 else
1243 {
1244 IEM_MC_BEGIN(3, 0);
1245 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1246 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1247 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1248
1249 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1250 IEM_MC_PREPARE_AVX_USAGE();
1251 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1252
1253 IEM_MC_ADVANCE_RIP();
1254 IEM_MC_END();
1255 }
1256 }
1257 else
1258 {
1259 /*
1260 * Register, memory.
1261 */
1262 if (pVCpu->iem.s.uVexLength == 0)
1263 {
1264 IEM_MC_BEGIN(2, 2);
1265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1266 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1267 IEM_MC_ARG(uint64_t, uSrc, 1);
1268
1269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1272 IEM_MC_PREPARE_AVX_USAGE();
1273
1274 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1275 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1276 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1277 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1278
1279 IEM_MC_ADVANCE_RIP();
1280 IEM_MC_END();
1281 }
1282 else
1283 {
1284 IEM_MC_BEGIN(3, 2);
1285 IEM_MC_LOCAL(RTUINT256U, uSrc);
1286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1287 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1288 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1289 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1290
1291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1292 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1293 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1294 IEM_MC_PREPARE_AVX_USAGE();
1295
1296 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1297 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1298
1299 IEM_MC_ADVANCE_RIP();
1300 IEM_MC_END();
1301 }
1302 }
1303 return VINF_SUCCESS;
1304}
1305
1306
1307/**
1308 * @opcode 0x13
1309 * @opcodesub !11 mr/reg
1310 * @oppfx none
1311 * @opcpuid avx
1312 * @opgroup og_avx_simdfp_datamove
1313 * @opxcpttype 5
1314 * @optest op1=1 op2=2 -> op1=2
1315 * @optest op1=0 op2=-42 -> op1=-42
1316 */
1317FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1318{
1319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1320 if (IEM_IS_MODRM_MEM_MODE(bRm))
1321 {
1322 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1323
1324 IEM_MC_BEGIN(0, 2);
1325 IEM_MC_LOCAL(uint64_t, uSrc);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327
1328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1329 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1332
1333 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1334 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1335
1336 IEM_MC_ADVANCE_RIP();
1337 IEM_MC_END();
1338 return VINF_SUCCESS;
1339 }
1340
1341 /**
1342 * @opdone
1343 * @opmnemonic udvex0f13m3
1344 * @opcode 0x13
1345 * @opcodesub 11 mr/reg
1346 * @oppfx none
1347 * @opunused immediate
1348 * @opcpuid avx
1349 * @optest ->
1350 */
1351 return IEMOP_RAISE_INVALID_OPCODE();
1352}
1353
1354
1355/**
1356 * @opcode 0x13
1357 * @opcodesub !11 mr/reg
1358 * @oppfx 0x66
1359 * @opcpuid avx
1360 * @opgroup og_avx_pcksclr_datamove
1361 * @opxcpttype 5
1362 * @optest op1=1 op2=2 -> op1=2
1363 * @optest op1=0 op2=-42 -> op1=-42
1364 */
1365FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1366{
1367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1368 if (IEM_IS_MODRM_MEM_MODE(bRm))
1369 {
1370 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1371 IEM_MC_BEGIN(0, 2);
1372 IEM_MC_LOCAL(uint64_t, uSrc);
1373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1374
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1377 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1378 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1379
1380 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1381 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1382
1383 IEM_MC_ADVANCE_RIP();
1384 IEM_MC_END();
1385 return VINF_SUCCESS;
1386 }
1387
1388 /**
1389 * @opdone
1390 * @opmnemonic udvex660f13m3
1391 * @opcode 0x13
1392 * @opcodesub 11 mr/reg
1393 * @oppfx 0x66
1394 * @opunused immediate
1395 * @opcpuid avx
1396 * @optest ->
1397 */
1398 return IEMOP_RAISE_INVALID_OPCODE();
1399}
1400
1401/* Opcode VEX.F3.0F 0x13 - invalid */
1402/* Opcode VEX.F2.0F 0x13 - invalid */
1403
1404/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1405FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1406/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1407FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1408/* Opcode VEX.F3.0F 0x14 - invalid */
1409/* Opcode VEX.F2.0F 0x14 - invalid */
1410/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1411FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1412/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1413FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1414/* Opcode VEX.F3.0F 0x15 - invalid */
1415/* Opcode VEX.F2.0F 0x15 - invalid */
1416
1417
1418FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1419{
1420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1421 if (IEM_IS_MODRM_REG_MODE(bRm))
1422 {
1423 /**
1424 * @opcode 0x16
1425 * @opcodesub 11 mr/reg
1426 * @oppfx none
1427 * @opcpuid avx
1428 * @opgroup og_avx_simdfp_datamerge
1429 * @opxcpttype 7LZ
1430 */
1431 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1432
1433 IEMOP_HLP_DONE_VEX_DECODING_L0();
1434 IEM_MC_BEGIN(0, 0);
1435
1436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1437 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1438 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1439 IEM_GET_MODRM_RM(pVCpu, bRm),
1440 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1441
1442 IEM_MC_ADVANCE_RIP();
1443 IEM_MC_END();
1444 }
1445 else
1446 {
1447 /**
1448 * @opdone
1449 * @opcode 0x16
1450 * @opcodesub !11 mr/reg
1451 * @oppfx none
1452 * @opcpuid avx
1453 * @opgroup og_avx_simdfp_datamove
1454 * @opxcpttype 5LZ
1455 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1456 */
1457 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1458
1459 IEM_MC_BEGIN(0, 2);
1460 IEM_MC_LOCAL(uint64_t, uSrc);
1461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1462
1463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1464 IEMOP_HLP_DONE_VEX_DECODING_L0();
1465 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1466 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1467
1468 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1469 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1470 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1471 uSrc);
1472
1473 IEM_MC_ADVANCE_RIP();
1474 IEM_MC_END();
1475 }
1476 return VINF_SUCCESS;
1477}
1478
1479
1480/**
1481 * @opcode 0x16
1482 * @opcodesub !11 mr/reg
1483 * @oppfx 0x66
1484 * @opcpuid avx
1485 * @opgroup og_avx_pcksclr_datamerge
1486 * @opxcpttype 5LZ
1487 */
1488FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1489{
1490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1491 if (IEM_IS_MODRM_MEM_MODE(bRm))
1492 {
1493 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1494
1495 IEM_MC_BEGIN(0, 2);
1496 IEM_MC_LOCAL(uint64_t, uSrc);
1497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1498
1499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1500 IEMOP_HLP_DONE_VEX_DECODING_L0();
1501 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1502 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1503
1504 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1505 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1506 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1507 uSrc);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 return VINF_SUCCESS;
1512 }
1513
1514 /**
1515 * @opdone
1516 * @opmnemonic udvex660f16m3
1517 * @opcode 0x12
1518 * @opcodesub 11 mr/reg
1519 * @oppfx 0x66
1520 * @opunused immediate
1521 * @opcpuid avx
1522 * @optest ->
1523 */
1524 return IEMOP_RAISE_INVALID_OPCODE();
1525}
1526
1527
1528/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1529/**
1530 * @opcode 0x16
1531 * @oppfx 0xf3
1532 * @opcpuid avx
1533 * @opgroup og_avx_pcksclr_datamove
1534 * @opxcpttype 4
1535 */
1536FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1537{
1538 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1539 Assert(pVCpu->iem.s.uVexLength <= 1);
1540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1541 if (IEM_IS_MODRM_REG_MODE(bRm))
1542 {
1543 /*
1544 * Register, register.
1545 */
1546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1547 if (pVCpu->iem.s.uVexLength == 0)
1548 {
1549 IEM_MC_BEGIN(2, 0);
1550 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1551 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1552
1553 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1554 IEM_MC_PREPARE_AVX_USAGE();
1555
1556 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1557 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1558 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1559 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1560
1561 IEM_MC_ADVANCE_RIP();
1562 IEM_MC_END();
1563 }
1564 else
1565 {
1566 IEM_MC_BEGIN(3, 0);
1567 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1568 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1569 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1570
1571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1572 IEM_MC_PREPARE_AVX_USAGE();
1573 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);
1574
1575 IEM_MC_ADVANCE_RIP();
1576 IEM_MC_END();
1577 }
1578 }
1579 else
1580 {
1581 /*
1582 * Register, memory.
1583 */
1584 if (pVCpu->iem.s.uVexLength == 0)
1585 {
1586 IEM_MC_BEGIN(2, 2);
1587 IEM_MC_LOCAL(RTUINT128U, uSrc);
1588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1589 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1590 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1591
1592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1593 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1595 IEM_MC_PREPARE_AVX_USAGE();
1596
1597 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1598 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1599 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1600 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1601
1602 IEM_MC_ADVANCE_RIP();
1603 IEM_MC_END();
1604 }
1605 else
1606 {
1607 IEM_MC_BEGIN(3, 2);
1608 IEM_MC_LOCAL(RTUINT256U, uSrc);
1609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1610 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1611 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1612 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1613
1614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1615 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1616 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1617 IEM_MC_PREPARE_AVX_USAGE();
1618
1619 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1620 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);
1621
1622 IEM_MC_ADVANCE_RIP();
1623 IEM_MC_END();
1624 }
1625 }
1626 return VINF_SUCCESS;
1627}
1628
1629
1630/* Opcode VEX.F2.0F 0x16 - invalid */
1631
1632
1633/**
1634 * @opcode 0x17
1635 * @opcodesub !11 mr/reg
1636 * @oppfx none
1637 * @opcpuid avx
1638 * @opgroup og_avx_simdfp_datamove
1639 * @opxcpttype 5
1640 */
1641FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1642{
1643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1644 if (IEM_IS_MODRM_MEM_MODE(bRm))
1645 {
1646 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1647
1648 IEM_MC_BEGIN(0, 2);
1649 IEM_MC_LOCAL(uint64_t, uSrc);
1650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1651
1652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1653 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1654 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1655 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1656
1657 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1658 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1659
1660 IEM_MC_ADVANCE_RIP();
1661 IEM_MC_END();
1662 return VINF_SUCCESS;
1663 }
1664
1665 /**
1666 * @opdone
1667 * @opmnemonic udvex0f17m3
1668 * @opcode 0x17
1669 * @opcodesub 11 mr/reg
1670 * @oppfx none
1671 * @opunused immediate
1672 * @opcpuid avx
1673 * @optest ->
1674 */
1675 return IEMOP_RAISE_INVALID_OPCODE();
1676}
1677
1678
1679/**
1680 * @opcode 0x17
1681 * @opcodesub !11 mr/reg
1682 * @oppfx 0x66
1683 * @opcpuid avx
1684 * @opgroup og_avx_pcksclr_datamove
1685 * @opxcpttype 5
1686 */
1687FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1688{
1689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1690 if (IEM_IS_MODRM_MEM_MODE(bRm))
1691 {
1692 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1693 IEM_MC_BEGIN(0, 2);
1694 IEM_MC_LOCAL(uint64_t, uSrc);
1695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1696
1697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1698 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1699 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1700 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1701
1702 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1703 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1704
1705 IEM_MC_ADVANCE_RIP();
1706 IEM_MC_END();
1707 return VINF_SUCCESS;
1708 }
1709
1710 /**
1711 * @opdone
1712 * @opmnemonic udvex660f17m3
1713 * @opcode 0x17
1714 * @opcodesub 11 mr/reg
1715 * @oppfx 0x66
1716 * @opunused immediate
1717 * @opcpuid avx
1718 * @optest ->
1719 */
1720 return IEMOP_RAISE_INVALID_OPCODE();
1721}
1722
1723
1724/* Opcode VEX.F3.0F 0x17 - invalid */
1725/* Opcode VEX.F2.0F 0x17 - invalid */
1726
1727
1728/* Opcode VEX.0F 0x18 - invalid */
1729/* Opcode VEX.0F 0x19 - invalid */
1730/* Opcode VEX.0F 0x1a - invalid */
1731/* Opcode VEX.0F 0x1b - invalid */
1732/* Opcode VEX.0F 0x1c - invalid */
1733/* Opcode VEX.0F 0x1d - invalid */
1734/* Opcode VEX.0F 0x1e - invalid */
1735/* Opcode VEX.0F 0x1f - invalid */
1736
1737/* Opcode VEX.0F 0x20 - invalid */
1738/* Opcode VEX.0F 0x21 - invalid */
1739/* Opcode VEX.0F 0x22 - invalid */
1740/* Opcode VEX.0F 0x23 - invalid */
1741/* Opcode VEX.0F 0x24 - invalid */
1742/* Opcode VEX.0F 0x25 - invalid */
1743/* Opcode VEX.0F 0x26 - invalid */
1744/* Opcode VEX.0F 0x27 - invalid */
1745
1746/**
1747 * @opcode 0x28
1748 * @oppfx none
1749 * @opcpuid avx
1750 * @opgroup og_avx_pcksclr_datamove
1751 * @opxcpttype 1
1752 * @optest op1=1 op2=2 -> op1=2
1753 * @optest op1=0 op2=-42 -> op1=-42
1754 * @note Almost identical to vmovapd.
1755 */
1756FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1757{
1758 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1760 Assert(pVCpu->iem.s.uVexLength <= 1);
1761 if (IEM_IS_MODRM_REG_MODE(bRm))
1762 {
1763 /*
1764 * Register, register.
1765 */
1766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1767 IEM_MC_BEGIN(1, 0);
1768
1769 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1770 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1771 if (pVCpu->iem.s.uVexLength == 0)
1772 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1773 IEM_GET_MODRM_RM(pVCpu, bRm));
1774 else
1775 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1776 IEM_GET_MODRM_RM(pVCpu, bRm));
1777 IEM_MC_ADVANCE_RIP();
1778 IEM_MC_END();
1779 }
1780 else
1781 {
1782 /*
1783 * Register, memory.
1784 */
1785 if (pVCpu->iem.s.uVexLength == 0)
1786 {
1787 IEM_MC_BEGIN(0, 2);
1788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1789 IEM_MC_LOCAL(RTUINT128U, uSrc);
1790
1791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1792 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1793 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1794 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1795
1796 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1797 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1798
1799 IEM_MC_ADVANCE_RIP();
1800 IEM_MC_END();
1801 }
1802 else
1803 {
1804 IEM_MC_BEGIN(0, 2);
1805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1806 IEM_MC_LOCAL(RTUINT256U, uSrc);
1807
1808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1809 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1810 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1811 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1812
1813 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1814 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1815
1816 IEM_MC_ADVANCE_RIP();
1817 IEM_MC_END();
1818 }
1819 }
1820 return VINF_SUCCESS;
1821}


/**
 * @opcode 0x28
 * @oppfx 66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovaps
 */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(1, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * @opmnemonic udvexf30f28
 * @opcode 0x28
 * @oppfx 0xf3
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udvexf20f28
 * @opcode 0x28
 * @oppfx 0xf2
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */

/**
 * @opcode 0x29
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovapd.
 */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(1, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x29
 * @oppfx 66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovaps
 */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(1, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/**
 * @opmnemonic udvexf30f29
 * @opcode 0x29
 * @oppfx 0xf3
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udvexf20f29
 * @opcode 0x29
 * @oppfx 0xf2
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */


/* Opcode VEX.0F 0x2a - invalid */
/* Opcode VEX.66.0F 0x2a - invalid */
/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);


/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Identical implementation to vmovntpd
 */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Identical implementation to vmovntps
 */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/**
 * @opmnemonic udvexf30f2b
 * @opcode 0x2b
 * @oppfx 0xf3
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udvexf20f2b
 * @opcode 0x2b
 * @oppfx 0xf2
 * @opunused vex.modrm
 * @opcpuid avx
 * @optest ->
 * @opdone
 */


/* Opcode VEX.0F 0x2c - invalid */
/* Opcode VEX.66.0F 0x2c - invalid */
/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/* Opcode VEX.0F 0x2d - invalid */
/* Opcode VEX.66.0F 0x2d - invalid */
/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
/* Opcode VEX.F3.0F 0x2e - invalid */
/* Opcode VEX.F2.0F 0x2e - invalid */

/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode VEX.F3.0F 0x2f - invalid */
/* Opcode VEX.F2.0F 0x2f - invalid */

/* Opcode VEX.0F 0x30 - invalid */
/* Opcode VEX.0F 0x31 - invalid */
/* Opcode VEX.0F 0x32 - invalid */
/* Opcode VEX.0F 0x33 - invalid */
/* Opcode VEX.0F 0x34 - invalid */
/* Opcode VEX.0F 0x35 - invalid */
/* Opcode VEX.0F 0x36 - invalid */
/* Opcode VEX.0F 0x37 - invalid */
/* Opcode VEX.0F 0x38 - invalid */
/* Opcode VEX.0F 0x39 - invalid */
/* Opcode VEX.0F 0x3a - invalid */
/* Opcode VEX.0F 0x3b - invalid */
/* Opcode VEX.0F 0x3c - invalid */
/* Opcode VEX.0F 0x3d - invalid */
/* Opcode VEX.0F 0x3e - invalid */
/* Opcode VEX.0F 0x3f - invalid */
/* Opcode VEX.0F 0x40 - invalid */
/* Opcode VEX.0F 0x41 - invalid */
/* Opcode VEX.0F 0x42 - invalid */
/* Opcode VEX.0F 0x43 - invalid */
/* Opcode VEX.0F 0x44 - invalid */
/* Opcode VEX.0F 0x45 - invalid */
/* Opcode VEX.0F 0x46 - invalid */
/* Opcode VEX.0F 0x47 - invalid */
/* Opcode VEX.0F 0x48 - invalid */
/* Opcode VEX.0F 0x49 - invalid */
/* Opcode VEX.0F 0x4a - invalid */
/* Opcode VEX.0F 0x4b - invalid */
/* Opcode VEX.0F 0x4c - invalid */
/* Opcode VEX.0F 0x4d - invalid */
/* Opcode VEX.0F 0x4e - invalid */
/* Opcode VEX.0F 0x4f - invalid */

/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode VEX.F3.0F 0x50 - invalid */
/* Opcode VEX.F2.0F 0x50 - invalid */

/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode VEX.66.0F 0x52 - invalid */
/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode VEX.F2.0F 0x52 - invalid */

/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode VEX.66.0F 0x53 - invalid */
/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode VEX.F2.0F 0x53 - invalid */


/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}


/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}


/* Opcode VEX.F3.0F 0x54 - invalid */
/* Opcode VEX.F2.0F 0x54 - invalid */


/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
}


/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
}


/* Opcode VEX.F3.0F 0x55 - invalid */
/* Opcode VEX.F2.0F 0x55 - invalid */

/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}


/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}


/* Opcode VEX.F3.0F 0x56 - invalid */
/* Opcode VEX.F2.0F 0x56 - invalid */


/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/* Opcode VEX.F3.0F 0x57 - invalid */
/* Opcode VEX.F2.0F 0x57 - invalid */

/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode VEX.F2.0F 0x5b - invalid */

/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);


/* Opcode VEX.0F 0x60 - invalid */


/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x60 - invalid */


/* Opcode VEX.0F 0x61 - invalid */


/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x61 - invalid */


/* Opcode VEX.0F 0x62 - invalid */

/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x62 - invalid */



/* Opcode VEX.0F 0x63 - invalid */


/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x63 - invalid */

/* Opcode VEX.0F 0x64 - invalid */


/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x64 - invalid */

/* Opcode VEX.0F 0x65 - invalid */


/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x65 - invalid */

/* Opcode VEX.0F 0x66 - invalid */


/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x66 - invalid */

/* Opcode VEX.0F 0x67 - invalid */

/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
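
/* For orientation: vpackuswb narrows signed words to unsigned bytes with
   saturation. A minimal reference sketch of the per-word step follows; it is
   illustrative only and not the actual A-implementation (the helper name is
   made up for the sketch). */
#if 0 /* illustrative sketch, not built */
static uint8_t iemPackUswbSaturateRef(int16_t i16Src)
{
    if (i16Src < 0)
        return 0;           /* negative words clamp to 0 */
    if (i16Src > 255)
        return 255;         /* large words clamp to 0xff */
    return (uint8_t)i16Src; /* in-range words pass through */
}
#endif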


/* Opcode VEX.F3.0F 0x67 - invalid */


///**
// * Common worker for SSE2 instructions on the form:
// *      pxxxx xmm1, xmm2/mem128
// *
// * The 2nd operand is the second half of a register, which in the memory case
// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
// * where it may read the full 128 bits or only the upper 64 bits.
// *
// * Exceptions type 4.
// */
//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if (IEM_IS_MODRM_REG_MODE(bRm))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(2, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
//        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(2, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
//
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}


/* Opcode VEX.0F 0x68 - invalid */

/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x68 - invalid */


/* Opcode VEX.0F 0x69 - invalid */


/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x69 - invalid */


/* Opcode VEX.0F 0x6a - invalid */

/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x6a - invalid */


/* Opcode VEX.0F 0x6b - invalid */


/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x6b - invalid */


/* Opcode VEX.0F 0x6c - invalid */


/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x6c - invalid */
/* Opcode VEX.F2.0F 0x6c - invalid */


/* Opcode VEX.0F 0x6d - invalid */

/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x6d - invalid */


/* Opcode VEX.0F 0x6e - invalid */

FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @opfunction iemOp_vmovd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
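
/* All the IEM_MC_STORE_YREG_*_ZX_VLMAX stores above write the low element and
   zero the rest of the register up to the maximum vector length, per the VEX
   rules. A rough sketch of the 64-bit case (illustrative only; pYReg is a
   hypothetical pointer to the 256-bit register state, not a real variable
   here): */
#if 0 /* illustrative sketch, not built */
pYReg->au64[0] = u64Tmp; /* low qword written */
pYReg->au64[1] = 0;      /* bits 127:64 zeroed */
pYReg->au64[2] = 0;      /* bits 191:128 zeroed */
pYReg->au64[3] = 0;      /* bits 255:192 zeroed */
#endif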


/* Opcode VEX.F3.0F 0x6e - invalid */


/* Opcode VEX.0F 0x6f - invalid */

/**
 * @opcode 0x6f
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdint_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x6f
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_simdint_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/* Opcode VEX.0F 0x70 - invalid */


/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm2/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 1);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
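
/* The worker above only marshals operands and the imm8; the shuffle itself
   lives in the A-implementations. For orientation, a reference sketch of the
   128-bit dword shuffle that vpshufd performs (illustrative only; the helper
   name is made up and this is not the real iemAImpl_pshufd_u128): */
#if 0 /* illustrative sketch, not built */
static void iemPshufdU128Ref(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil)
{
    RTUINT128U const uSrc = *puSrc; /* copy first; dst may alias src */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bEvil >> (i * 2)) & 3]; /* 2 imm bits select each dword */
}
#endif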


/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
}


/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
}


/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
}


/* Opcode VEX.0F 0x71 11/2 - invalid. */
/** Opcode VEX.66.0F 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/4 - invalid */
/** Opcode VEX.66.0F 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/6 - invalid */
/** Opcode VEX.66.0F 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);


/**
 * VEX Group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
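
/* The 8*4 tables for groups 12/13/14 are indexed by ModR/M.reg and the SIMD
   prefix. A sketch of the lookup arithmetic used by the dispatchers below
   (illustrative only; the exact idxPrefix encoding -- 0=none, 1=0x66, 2=0xF3,
   3=0xF2 -- is an assumption about the decoder state here): */
#if 0 /* illustrative sketch, not built */
unsigned const idx = IEM_GET_MODRM_REG_8(bRm) * 4 /* /0../7 selects the row   */
                   + pVCpu->iem.s.idxPrefix;      /* prefix selects the column */
return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[idx], bRm);
#endif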


/** Opcode VEX.0F 0x71. */
FNIEMOP_DEF(iemOp_VGrp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                    + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/* Opcode VEX.0F 0x72 11/2 - invalid. */
/** Opcode VEX.66.0F 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/4 - invalid. */
/** Opcode VEX.66.0F 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/6 - invalid. */
/** Opcode VEX.66.0F 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);


/**
 * Group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);

/** Opcode VEX.0F 0x72. */
FNIEMOP_DEF(iemOp_VGrp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                    + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/* Opcode VEX.0F 0x73 11/2 - invalid. */
/** Opcode VEX.66.0F 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode VEX.66.0F 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x73 11/6 - invalid. */
/** Opcode VEX.66.0F 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode VEX.66.0F 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);

/**
 * Group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);


/** Opcode VEX.0F 0x73. */
FNIEMOP_DEF(iemOp_VGrp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                    + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/* Opcode VEX.0F 0x74 - invalid */


/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.F3.0F 0x74 - invalid */
/* Opcode VEX.F2.0F 0x74 - invalid */


/* Opcode VEX.0F 0x75 - invalid */


/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x75 - invalid */
/* Opcode VEX.F2.0F 0x75 - invalid */


/* Opcode VEX.0F 0x76 - invalid */


/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x76 - invalid */
/* Opcode VEX.F2.0F 0x76 - invalid */


/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
/* Opcode VEX.66.0F 0x77 - invalid */
/* Opcode VEX.F3.0F 0x77 - invalid */
/* Opcode VEX.F2.0F 0x77 - invalid */

/* Opcode VEX.0F 0x78 - invalid */
/* Opcode VEX.66.0F 0x78 - invalid */
/* Opcode VEX.F3.0F 0x78 - invalid */
/* Opcode VEX.F2.0F 0x78 - invalid */

/* Opcode VEX.0F 0x79 - invalid */
/* Opcode VEX.66.0F 0x79 - invalid */
/* Opcode VEX.F3.0F 0x79 - invalid */
/* Opcode VEX.F2.0F 0x79 - invalid */

/* Opcode VEX.0F 0x7a - invalid */
/* Opcode VEX.66.0F 0x7a - invalid */
/* Opcode VEX.F3.0F 0x7a - invalid */
/* Opcode VEX.F2.0F 0x7a - invalid */

/* Opcode VEX.0F 0x7b - invalid */
/* Opcode VEX.66.0F 0x7b - invalid */
/* Opcode VEX.F3.0F 0x7b - invalid */
/* Opcode VEX.F2.0F 0x7b - invalid */

/* Opcode VEX.0F 0x7c - invalid */
/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7c - invalid */
/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode VEX.0F 0x7d - invalid */
/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7d - invalid */
/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);


/* Opcode VEX.0F 0x7e - invalid */

FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @opfunction iemOp_vmovd_q_Ey_Vy
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x7e
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                      IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

3538/* Opcode VEX.F2.0F 0x7e - invalid */
3539
3540
3541/* Opcode VEX.0F 0x7f - invalid */
3542
3543/**
3544 * @opcode 0x7f
3545 * @oppfx 0x66
3546 * @opcpuid avx
3547 * @opgroup og_avx_simdint_datamove
3548 * @opxcpttype 1
3549 * @optest op1=1 op2=2 -> op1=2
3550 * @optest op1=0 op2=-42 -> op1=-42
3551 */
3552FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3553{
3554 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3555 Assert(pVCpu->iem.s.uVexLength <= 1);
3556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3557 if (IEM_IS_MODRM_REG_MODE(bRm))
3558 {
3559 /*
3560 * Register, register.
3561 */
3562 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3563 IEM_MC_BEGIN(0, 0);
3564
3565 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3567 if (pVCpu->iem.s.uVexLength == 0)
3568 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3569 IEM_GET_MODRM_REG(pVCpu, bRm));
3570 else
3571 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3572 IEM_GET_MODRM_REG(pVCpu, bRm));
3573 IEM_MC_ADVANCE_RIP();
3574 IEM_MC_END();
3575 }
3576 else if (pVCpu->iem.s.uVexLength == 0)
3577 {
3578 /*
3579 * Register, memory128.
3580 */
3581 IEM_MC_BEGIN(0, 2);
3582 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3584
3585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3586 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3587 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3588 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3589
3590 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3591 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3592
3593 IEM_MC_ADVANCE_RIP();
3594 IEM_MC_END();
3595 }
3596 else
3597 {
3598 /*
3599 * Register, memory256.
3600 */
3601 IEM_MC_BEGIN(0, 2);
3602 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3604
3605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3606 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3607 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3608 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3609
3610 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3611 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3612
3613 IEM_MC_ADVANCE_RIP();
3614 IEM_MC_END();
3615 }
3616 return VINF_SUCCESS;
3617}
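
/*
 * Illustrative sketch (assumption-level, names hypothetical): VMOVDQA demands
 * a naturally aligned effective address - 16 bytes for the 128-bit form and
 * 32 bytes for the 256-bit form - which is what the *_ALIGN_SSE and
 * *_ALIGN_AVX store ops above enforce; VMOVDQU below performs the same stores
 * without the check.  A standalone predicate could look like this:
 */
static int vmovdqaIsAligned(uint64_t GCPtrEff, unsigned uVexLength)
{
    uint64_t const fAlignMask = uVexLength ? 31 : 15; /* 32 bytes for L=1, 16 for L=0. */
    return (GCPtrEff & fAlignMask) == 0; /* A false result would mean #GP(0). */
}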
3618
3619/**
3620 * @opcode 0x7f
3621 * @oppfx 0xf3
3622 * @opcpuid avx
3623 * @opgroup og_avx_simdint_datamove
3624 * @opxcpttype 4UA
3625 * @optest op1=1 op2=2 -> op1=2
3626 * @optest op1=0 op2=-42 -> op1=-42
3627 */
3628FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3629{
3630 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3631 Assert(pVCpu->iem.s.uVexLength <= 1);
3632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3633 if (IEM_IS_MODRM_REG_MODE(bRm))
3634 {
3635 /*
3636 * Register, register.
3637 */
3638 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3639 IEM_MC_BEGIN(0, 0);
3640
3641 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3642 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3643 if (pVCpu->iem.s.uVexLength == 0)
3644 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3645 IEM_GET_MODRM_REG(pVCpu, bRm));
3646 else
3647 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3648 IEM_GET_MODRM_REG(pVCpu, bRm));
3649 IEM_MC_ADVANCE_RIP();
3650 IEM_MC_END();
3651 }
3652 else if (pVCpu->iem.s.uVexLength == 0)
3653 {
3654 /*
3655 * Register, memory128.
3656 */
3657 IEM_MC_BEGIN(0, 2);
3658 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3660
3661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3662 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3663 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3665
3666 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3667 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3668
3669 IEM_MC_ADVANCE_RIP();
3670 IEM_MC_END();
3671 }
3672 else
3673 {
3674 /*
3675 * Register, memory256.
3676 */
3677 IEM_MC_BEGIN(0, 2);
3678 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3680
3681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3682 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3683 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3684 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3685
3686 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3687 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3688
3689 IEM_MC_ADVANCE_RIP();
3690 IEM_MC_END();
3691 }
3692 return VINF_SUCCESS;
3693}
3694
3695/* Opcode VEX.F2.0F 0x7f - invalid */
3696
3697
3698/* Opcode VEX.0F 0x80 - invalid */
3699/* Opcode VEX.0F 0x81 - invalid */
3700/* Opcode VEX.0F 0x82 - invalid */
3701/* Opcode VEX.0F 0x83 - invalid */
3702/* Opcode VEX.0F 0x84 - invalid */
3703/* Opcode VEX.0F 0x85 - invalid */
3704/* Opcode VEX.0F 0x86 - invalid */
3705/* Opcode VEX.0F 0x87 - invalid */
3706/* Opcode VEX.0F 0x88 - invalid */
3707/* Opcode VEX.0F 0x89 - invalid */
3708/* Opcode VEX.0F 0x8a - invalid */
3709/* Opcode VEX.0F 0x8b - invalid */
3710/* Opcode VEX.0F 0x8c - invalid */
3711/* Opcode VEX.0F 0x8d - invalid */
3712/* Opcode VEX.0F 0x8e - invalid */
3713/* Opcode VEX.0F 0x8f - invalid */
3714/* Opcode VEX.0F 0x90 - invalid */
3715/* Opcode VEX.0F 0x91 - invalid */
3716/* Opcode VEX.0F 0x92 - invalid */
3717/* Opcode VEX.0F 0x93 - invalid */
3718/* Opcode VEX.0F 0x94 - invalid */
3719/* Opcode VEX.0F 0x95 - invalid */
3720/* Opcode VEX.0F 0x96 - invalid */
3721/* Opcode VEX.0F 0x97 - invalid */
3722/* Opcode VEX.0F 0x98 - invalid */
3723/* Opcode VEX.0F 0x99 - invalid */
3724/* Opcode VEX.0F 0x9a - invalid */
3725/* Opcode VEX.0F 0x9b - invalid */
3726/* Opcode VEX.0F 0x9c - invalid */
3727/* Opcode VEX.0F 0x9d - invalid */
3728/* Opcode VEX.0F 0x9e - invalid */
3729/* Opcode VEX.0F 0x9f - invalid */
3730/* Opcode VEX.0F 0xa0 - invalid */
3731/* Opcode VEX.0F 0xa1 - invalid */
3732/* Opcode VEX.0F 0xa2 - invalid */
3733/* Opcode VEX.0F 0xa3 - invalid */
3734/* Opcode VEX.0F 0xa4 - invalid */
3735/* Opcode VEX.0F 0xa5 - invalid */
3736/* Opcode VEX.0F 0xa6 - invalid */
3737/* Opcode VEX.0F 0xa7 - invalid */
3738/* Opcode VEX.0F 0xa8 - invalid */
3739/* Opcode VEX.0F 0xa9 - invalid */
3740/* Opcode VEX.0F 0xaa - invalid */
3741/* Opcode VEX.0F 0xab - invalid */
3742/* Opcode VEX.0F 0xac - invalid */
3743/* Opcode VEX.0F 0xad - invalid */
3744
3745
3746/* Opcode VEX.0F 0xae mem/0 - invalid. */
3747/* Opcode VEX.0F 0xae mem/1 - invalid. */
3748
3749/**
3750 * @ opmaps grp15
3751 * @ opcode !11/2
3752 * @ oppfx none
3753 * @ opcpuid sse
3754 * @ opgroup og_sse_mxcsrsm
3755 * @ opxcpttype 5
3756 * @ optest op1=0 -> mxcsr=0
3757 * @ optest op1=0x2083 -> mxcsr=0x2083
3758 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3759 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3760 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3761 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3762 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3763 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3764 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3765 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3766 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3767 */
3768FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3769//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3770//{
3771// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3772// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3773// return IEMOP_RAISE_INVALID_OPCODE();
3774//
3775// IEM_MC_BEGIN(2, 0);
3776// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3777// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3778// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3779// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3781// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3782// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3783// IEM_MC_END();
3784// return VINF_SUCCESS;
3785//}
3786
3787
3788/**
3789 * @opmaps vexgrp15
3790 * @opcode !11/3
3791 * @oppfx none
3792 * @opcpuid avx
3793 * @opgroup og_avx_mxcsrsm
3794 * @opxcpttype 5
3795 * @optest mxcsr=0 -> op1=0
3796 * @optest mxcsr=0x2083 -> op1=0x2083
3797 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3798 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3799 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3800 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3801 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3802 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3803 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3804 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3805 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3806 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3807 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3808 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3809 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3810 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3811 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3812 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3813 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3814 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3815 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3816 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3817 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3818 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3819 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3820 * -> value.xcpt=0x6
3821 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
3822 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
3823 * APMv4 rev 3.17 page 509.
3824 * @todo Test this instruction on AMD Ryzen.
3825 */
3826FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3827{
3828 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3829 IEM_MC_BEGIN(2, 0);
3830 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3831 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3833 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3835 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3836 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3837 IEM_MC_END();
3838 return VINF_SUCCESS;
3839}
3840
3841/* Opcode VEX.0F 0xae mem/4 - invalid. */
3842/* Opcode VEX.0F 0xae mem/5 - invalid. */
3843/* Opcode VEX.0F 0xae mem/6 - invalid. */
3844/* Opcode VEX.0F 0xae mem/7 - invalid. */
3845
3846/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3847/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3848/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3849/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3850/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3851/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3852/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3853/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3854
3855/**
3856 * Vex group 15 jump table for memory variant.
3857 */
3858IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3859{ /* pfx: none, 066h, 0f3h, 0f2h */
3860 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3861 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3862 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3863 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3864 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3865 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3866 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3867 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3868};
3869AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3870
3871
 3872/** Opcode VEX 0xae - VEX group 15. */
3873FNIEMOP_DEF(iemOp_VGrp15)
3874{
3875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3876 if (IEM_IS_MODRM_REG_MODE(bRm))
3877 /* register, register */
3878 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3879
3880 /* memory, register */
3881 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3882 + pVCpu->iem.s.idxPrefix], bRm);
3883}
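
/*
 * Worked dispatch example (illustrative, helper name hypothetical): the table
 * above is laid out reg-major with four SIMD-prefix columns per /r value, so
 * the lookup index is reg*4 + prefix.  VSTMXCSR is /3 with no prefix
 * (idxPrefix 0), giving 3*4 + 0 = 12; the 66h/F3h/F2h variants of /3 land on
 * indices 13..15, which are all iemOp_InvalidWithRM.
 */
static unsigned iemVexGrp15CalcIndexSketch(unsigned iReg, unsigned idxPrefix)
{
    return iReg * 4 + idxPrefix; /* e.g. /3, no prefix -> 12 (the vstmxcsr entry). */
}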
3884
3885
3886/* Opcode VEX.0F 0xaf - invalid. */
3887
3888/* Opcode VEX.0F 0xb0 - invalid. */
3889/* Opcode VEX.0F 0xb1 - invalid. */
3890/* Opcode VEX.0F 0xb2 - invalid. */
3892/* Opcode VEX.0F 0xb3 - invalid. */
3893/* Opcode VEX.0F 0xb4 - invalid. */
3894/* Opcode VEX.0F 0xb5 - invalid. */
3895/* Opcode VEX.0F 0xb6 - invalid. */
3896/* Opcode VEX.0F 0xb7 - invalid. */
3897/* Opcode VEX.0F 0xb8 - invalid. */
3898/* Opcode VEX.0F 0xb9 - invalid. */
3899/* Opcode VEX.0F 0xba - invalid. */
3900/* Opcode VEX.0F 0xbb - invalid. */
3901/* Opcode VEX.0F 0xbc - invalid. */
3902/* Opcode VEX.0F 0xbd - invalid. */
3903/* Opcode VEX.0F 0xbe - invalid. */
3904/* Opcode VEX.0F 0xbf - invalid. */
3905
3906/* Opcode VEX.0F 0xc0 - invalid. */
3907/* Opcode VEX.66.0F 0xc0 - invalid. */
3908/* Opcode VEX.F3.0F 0xc0 - invalid. */
3909/* Opcode VEX.F2.0F 0xc0 - invalid. */
3910
3911/* Opcode VEX.0F 0xc1 - invalid. */
3912/* Opcode VEX.66.0F 0xc1 - invalid. */
3913/* Opcode VEX.F3.0F 0xc1 - invalid. */
3914/* Opcode VEX.F2.0F 0xc1 - invalid. */
3915
3916/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3917FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3918/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3919FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3920/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3921FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3922/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3923FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3924
3925/* Opcode VEX.0F 0xc3 - invalid */
3926/* Opcode VEX.66.0F 0xc3 - invalid */
3927/* Opcode VEX.F3.0F 0xc3 - invalid */
3928/* Opcode VEX.F2.0F 0xc3 - invalid */
3929
3930/* Opcode VEX.0F 0xc4 - invalid */
3931/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3932FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3933/* Opcode VEX.F3.0F 0xc4 - invalid */
3934/* Opcode VEX.F2.0F 0xc4 - invalid */
3935
 3936/* Opcode VEX.0F 0xc5 - invalid */
3937/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3938FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3939/* Opcode VEX.F3.0F 0xc5 - invalid */
3940/* Opcode VEX.F2.0F 0xc5 - invalid */
3941
3942/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3943FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3944/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3945FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3946/* Opcode VEX.F3.0F 0xc6 - invalid */
3947/* Opcode VEX.F2.0F 0xc6 - invalid */
3948
3949/* Opcode VEX.0F 0xc7 - invalid */
3950/* Opcode VEX.66.0F 0xc7 - invalid */
3951/* Opcode VEX.F3.0F 0xc7 - invalid */
3952/* Opcode VEX.F2.0F 0xc7 - invalid */
3953
3954/* Opcode VEX.0F 0xc8 - invalid */
3955/* Opcode VEX.0F 0xc9 - invalid */
3956/* Opcode VEX.0F 0xca - invalid */
3957/* Opcode VEX.0F 0xcb - invalid */
3958/* Opcode VEX.0F 0xcc - invalid */
3959/* Opcode VEX.0F 0xcd - invalid */
3960/* Opcode VEX.0F 0xce - invalid */
3961/* Opcode VEX.0F 0xcf - invalid */
3962
3963
3964/* Opcode VEX.0F 0xd0 - invalid */
3965/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3966FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3967/* Opcode VEX.F3.0F 0xd0 - invalid */
3968/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3969FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
3970
3971/* Opcode VEX.0F 0xd1 - invalid */
3972/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
3973FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
3974/* Opcode VEX.F3.0F 0xd1 - invalid */
3975/* Opcode VEX.F2.0F 0xd1 - invalid */
3976
3977/* Opcode VEX.0F 0xd2 - invalid */
3978/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
3979FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
3980/* Opcode VEX.F3.0F 0xd2 - invalid */
3981/* Opcode VEX.F2.0F 0xd2 - invalid */
3982
3983/* Opcode VEX.0F 0xd3 - invalid */
3984/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
3985FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
3986/* Opcode VEX.F3.0F 0xd3 - invalid */
3987/* Opcode VEX.F2.0F 0xd3 - invalid */
3988
3989/* Opcode VEX.0F 0xd4 - invalid */
3990
3991
 3992/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
3993FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
3994{
3995 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3996 IEMOPMEDIAF3_INIT_VARS( vpaddq);
3997 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3998}
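
/*
 * Reference semantics sketch (illustrative, not the AVX worker itself):
 * VPADDQ adds independent 64-bit lanes with silent wrap-around and no flag
 * updates; the two lanes of the 128-bit form are shown, four apply for L=1.
 */
static void vpaddqRefSketch(uint64_t auDst[2], uint64_t const auSrc1[2], uint64_t const auSrc2[2])
{
    auDst[0] = auSrc1[0] + auSrc2[0]; /* unsigned wrap-around per lane */
    auDst[1] = auSrc1[1] + auSrc2[1];
}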
3999
4000
4001/* Opcode VEX.F3.0F 0xd4 - invalid */
4002/* Opcode VEX.F2.0F 0xd4 - invalid */
4003
4004/* Opcode VEX.0F 0xd5 - invalid */
4005
4006
4007/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4008FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4009{
4010 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4011 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4012 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4013}
4014
4015
4016/* Opcode VEX.F3.0F 0xd5 - invalid */
4017/* Opcode VEX.F2.0F 0xd5 - invalid */
4018
4019/* Opcode VEX.0F 0xd6 - invalid */
4020
4021/**
4022 * @opcode 0xd6
4023 * @oppfx 0x66
4024 * @opcpuid avx
4025 * @opgroup og_avx_pcksclr_datamove
4026 * @opxcpttype none
4027 * @optest op1=-1 op2=2 -> op1=2
4028 * @optest op1=0 op2=-42 -> op1=-42
4029 */
4030FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4031{
4032 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4034 if (IEM_IS_MODRM_REG_MODE(bRm))
4035 {
4036 /*
4037 * Register, register.
4038 */
4039 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4040 IEM_MC_BEGIN(0, 0);
4041
4042 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4043 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4044
4045 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4046 IEM_GET_MODRM_REG(pVCpu, bRm));
4047 IEM_MC_ADVANCE_RIP();
4048 IEM_MC_END();
4049 }
4050 else
4051 {
4052 /*
4053 * Memory, register.
4054 */
4055 IEM_MC_BEGIN(0, 2);
4056 IEM_MC_LOCAL(uint64_t, uSrc);
4057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4058
4059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4060 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4063
4064 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4065 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4066
4067 IEM_MC_ADVANCE_RIP();
4068 IEM_MC_END();
4069 }
4070 return VINF_SUCCESS;
4071}
4072
4073/* Opcode VEX.F3.0F 0xd6 - invalid */
4074/* Opcode VEX.F2.0F 0xd6 - invalid */
4075
4076
4077/* Opcode VEX.0F 0xd7 - invalid */
4078
 4079/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4080FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4081{
4082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 4083 /* Docs say register only. */
4084 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4085 {
 4086 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4087 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4088 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4089 if (pVCpu->iem.s.uVexLength)
4090 {
4091 IEM_MC_BEGIN(2, 1);
4092 IEM_MC_ARG(uint64_t *, puDst, 0);
4093 IEM_MC_LOCAL(RTUINT256U, uSrc);
4094 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4095 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4096 IEM_MC_PREPARE_AVX_USAGE();
4097 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4098 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4099 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4100 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4101 IEM_MC_ADVANCE_RIP();
4102 IEM_MC_END();
4103 }
4104 else
4105 {
4106 IEM_MC_BEGIN(2, 0);
4107 IEM_MC_ARG(uint64_t *, puDst, 0);
4108 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4109 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4110 IEM_MC_PREPARE_AVX_USAGE();
4111 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4112 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4113 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4114 IEM_MC_ADVANCE_RIP();
4115 IEM_MC_END();
4116 }
4117 return VINF_SUCCESS;
4118 }
4119 return IEMOP_RAISE_INVALID_OPCODE();
4120}
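
/*
 * Reference semantics sketch (illustrative, name hypothetical): VPMOVMSKB
 * collects the most significant bit of every source byte into the low bits of
 * the destination GPR and zeroes the remainder; 16 bytes shown, 32 for L=1.
 */
static void vpmovmskbRefSketch(uint64_t *puDst, uint8_t const abSrc[16])
{
    uint64_t fMask = 0;
    for (unsigned i = 0; i < 16; i++)
        fMask |= (uint64_t)(abSrc[i] >> 7) << i; /* bit 7 of byte i -> bit i */
    *puDst = fMask; /* the remaining bits of the 64-bit GREG end up zero */
}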
4121
4122
4123/* Opcode VEX.F3.0F 0xd7 - invalid */
4124/* Opcode VEX.F2.0F 0xd7 - invalid */
4125
4126
4127/* Opcode VEX.0F 0xd8 - invalid */
4128/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4129FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4130/* Opcode VEX.F3.0F 0xd8 - invalid */
4131/* Opcode VEX.F2.0F 0xd8 - invalid */
4132
4133/* Opcode VEX.0F 0xd9 - invalid */
4134/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4135FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4136/* Opcode VEX.F3.0F 0xd9 - invalid */
4137/* Opcode VEX.F2.0F 0xd9 - invalid */
4138
4139/* Opcode VEX.0F 0xda - invalid */
4140
4141
4142/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4143FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4144{
4145 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4146 IEMOPMEDIAF3_INIT_VARS(vpminub);
4147 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4148}
4149
4150
4151/* Opcode VEX.F3.0F 0xda - invalid */
4152/* Opcode VEX.F2.0F 0xda - invalid */
4153
4154/* Opcode VEX.0F 0xdb - invalid */
4155
4156
4157/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4158FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4159{
4160 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4161 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4162 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4163}
4164
4165
4166/* Opcode VEX.F3.0F 0xdb - invalid */
4167/* Opcode VEX.F2.0F 0xdb - invalid */
4168
4169/* Opcode VEX.0F 0xdc - invalid */
4170/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4171FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4172/* Opcode VEX.F3.0F 0xdc - invalid */
4173/* Opcode VEX.F2.0F 0xdc - invalid */
4174
4175/* Opcode VEX.0F 0xdd - invalid */
4176/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4177FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4178/* Opcode VEX.F3.0F 0xdd - invalid */
4179/* Opcode VEX.F2.0F 0xdd - invalid */
4180
4181/* Opcode VEX.0F 0xde - invalid */
4182
4183
4184/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4185FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4186{
4187 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4188 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4190}
4191
4192
4193/* Opcode VEX.F3.0F 0xde - invalid */
4194/* Opcode VEX.F2.0F 0xde - invalid */
4195
4196/* Opcode VEX.0F 0xdf - invalid */
4197
4198
4199/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4200FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4201{
4202 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4203 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4204 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4205}
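
/*
 * Reference semantics sketch (illustrative): VPANDN inverts the first source
 * operand (the one selected by VEX.vvvv) before ANDing, i.e. per 64-bit chunk
 * dst = ~src1 & src2; shown for the 128-bit form.
 */
static void vpandnRefSketch(uint64_t auDst[2], uint64_t const auSrc1[2], uint64_t const auSrc2[2])
{
    auDst[0] = ~auSrc1[0] & auSrc2[0];
    auDst[1] = ~auSrc1[1] & auSrc2[1];
}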
4206
4207
4208/* Opcode VEX.F3.0F 0xdf - invalid */
4209/* Opcode VEX.F2.0F 0xdf - invalid */
4210
4211/* Opcode VEX.0F 0xe0 - invalid */
4212
4213
4214/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4215FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4216{
4217 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4218 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4219 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4220}
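
/*
 * Reference semantics sketch (illustrative): VPAVGB is the unsigned,
 * round-half-up byte average (a + b + 1) >> 1, computed without overflow by
 * widening first; one lane shown.
 */
static uint8_t vpavgbRefSketch(uint8_t bSrc1, uint8_t bSrc2)
{
    return (uint8_t)(((unsigned)bSrc1 + bSrc2 + 1) >> 1);
}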
4221
4222
4223/* Opcode VEX.F3.0F 0xe0 - invalid */
4224/* Opcode VEX.F2.0F 0xe0 - invalid */
4225
4226/* Opcode VEX.0F 0xe1 - invalid */
4227/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4228FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4229/* Opcode VEX.F3.0F 0xe1 - invalid */
4230/* Opcode VEX.F2.0F 0xe1 - invalid */
4231
4232/* Opcode VEX.0F 0xe2 - invalid */
4233/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4234FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4235/* Opcode VEX.F3.0F 0xe2 - invalid */
4236/* Opcode VEX.F2.0F 0xe2 - invalid */
4237
4238/* Opcode VEX.0F 0xe3 - invalid */
4239
4240
4241/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4242FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4243{
4244 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4245 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4246 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4247}
4248
4249
4250/* Opcode VEX.F3.0F 0xe3 - invalid */
4251/* Opcode VEX.F2.0F 0xe3 - invalid */
4252
4253/* Opcode VEX.0F 0xe4 - invalid */
4254
4255
4256/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4257FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4258{
4259 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4260 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4261 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4262}
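
/*
 * Reference semantics sketch (illustrative): VPMULHUW keeps only the high 16
 * bits of the full 32-bit unsigned product of each word lane (VPMULHW below
 * is the signed sibling); one lane shown.
 */
static uint16_t vpmulhuwRefSketch(uint16_t uSrc1, uint16_t uSrc2)
{
    return (uint16_t)(((uint32_t)uSrc1 * uSrc2) >> 16);
}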
4263
4264
4265/* Opcode VEX.F3.0F 0xe4 - invalid */
4266/* Opcode VEX.F2.0F 0xe4 - invalid */
4267
4268/* Opcode VEX.0F 0xe5 - invalid */
4269
4270
4271/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4272FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4273{
4274 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4275 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4277}
4278
4279
4280/* Opcode VEX.F3.0F 0xe5 - invalid */
4281/* Opcode VEX.F2.0F 0xe5 - invalid */
4282
4283/* Opcode VEX.0F 0xe6 - invalid */
4284/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4285FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4286/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4287FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4288/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4289FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4290
4291
4292/* Opcode VEX.0F 0xe7 - invalid */
4293
4294/**
4295 * @opcode 0xe7
4296 * @opcodesub !11 mr/reg
4297 * @oppfx 0x66
4298 * @opcpuid avx
4299 * @opgroup og_avx_cachect
4300 * @opxcpttype 1
4301 * @optest op1=-1 op2=2 -> op1=2
4302 * @optest op1=0 op2=-42 -> op1=-42
4303 */
4304FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4305{
4306 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4307 Assert(pVCpu->iem.s.uVexLength <= 1);
4308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4309 if (IEM_IS_MODRM_MEM_MODE(bRm))
4310 {
4311 if (pVCpu->iem.s.uVexLength == 0)
4312 {
4313 /*
4314 * 128-bit: Memory, register.
4315 */
4316 IEM_MC_BEGIN(0, 2);
4317 IEM_MC_LOCAL(RTUINT128U, uSrc);
4318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4319
4320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4321 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4323 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4324
4325 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4326 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4327
4328 IEM_MC_ADVANCE_RIP();
4329 IEM_MC_END();
4330 }
4331 else
4332 {
4333 /*
4334 * 256-bit: Memory, register.
4335 */
4336 IEM_MC_BEGIN(0, 2);
4337 IEM_MC_LOCAL(RTUINT256U, uSrc);
4338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4339
4340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4341 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4342 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4343 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4344
4345 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4346 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4347
4348 IEM_MC_ADVANCE_RIP();
4349 IEM_MC_END();
4350 }
4351 return VINF_SUCCESS;
4352 }
4353 /**
4354 * @opdone
4355 * @opmnemonic udvex660fe7reg
4356 * @opcode 0xe7
4357 * @opcodesub 11 mr/reg
4358 * @oppfx 0x66
4359 * @opunused immediate
4360 * @opcpuid avx
4361 * @optest ->
4362 */
4363 return IEMOP_RAISE_INVALID_OPCODE();
4364}
4365
4366/* Opcode VEX.F3.0F 0xe7 - invalid */
4367/* Opcode VEX.F2.0F 0xe7 - invalid */
4368
4369
4370/* Opcode VEX.0F 0xe8 - invalid */
4371/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
4372FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
4373/* Opcode VEX.F3.0F 0xe8 - invalid */
4374/* Opcode VEX.F2.0F 0xe8 - invalid */
4375
4376/* Opcode VEX.0F 0xe9 - invalid */
4377/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
4378FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
4379/* Opcode VEX.F3.0F 0xe9 - invalid */
4380/* Opcode VEX.F2.0F 0xe9 - invalid */
4381
4382/* Opcode VEX.0F 0xea - invalid */
4383
4384
4385/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
4386FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
4387{
4388 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4389 IEMOPMEDIAF3_INIT_VARS(vpminsw);
4390 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4391}
4392
4393
4394/* Opcode VEX.F3.0F 0xea - invalid */
4395/* Opcode VEX.F2.0F 0xea - invalid */
4396
4397/* Opcode VEX.0F 0xeb - invalid */
4398
4399
4400/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
4401FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
4402{
4403 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4404 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4405 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
4406}
4407
4408
4409
4410/* Opcode VEX.F3.0F 0xeb - invalid */
4411/* Opcode VEX.F2.0F 0xeb - invalid */
4412
4413/* Opcode VEX.0F 0xec - invalid */
4414/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
4415FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
4416/* Opcode VEX.F3.0F 0xec - invalid */
4417/* Opcode VEX.F2.0F 0xec - invalid */
4418
4419/* Opcode VEX.0F 0xed - invalid */
4420/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
4421FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
4422/* Opcode VEX.F3.0F 0xed - invalid */
4423/* Opcode VEX.F2.0F 0xed - invalid */
4424
4425/* Opcode VEX.0F 0xee - invalid */
4426
4427
4428/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
4429FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
4430{
4431 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4432 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
4433 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4434}
4435
4436
4437/* Opcode VEX.F3.0F 0xee - invalid */
4438/* Opcode VEX.F2.0F 0xee - invalid */
4439
4440
4441/* Opcode VEX.0F 0xef - invalid */
4442
4443
4444/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
4445FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
4446{
4447 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4448 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4449 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
4450}
4451
4452
4453/* Opcode VEX.F3.0F 0xef - invalid */
4454/* Opcode VEX.F2.0F 0xef - invalid */
4455
4456/* Opcode VEX.0F 0xf0 - invalid */
4457/* Opcode VEX.66.0F 0xf0 - invalid */
4458/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
4459FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
4460
4461/* Opcode VEX.0F 0xf1 - invalid */
4462/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
4463FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
4464/* Opcode VEX.F2.0F 0xf1 - invalid */
4465
4466/* Opcode VEX.0F 0xf2 - invalid */
4467/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
4468FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
4469/* Opcode VEX.F2.0F 0xf2 - invalid */
4470
4471/* Opcode VEX.0F 0xf3 - invalid */
4472/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
4473FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
4474/* Opcode VEX.F2.0F 0xf3 - invalid */
4475
4476/* Opcode VEX.0F 0xf4 - invalid */
4477
4478
4479/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
4480FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
4481{
4482 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4483 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
4484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4485}
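
/*
 * Reference semantics sketch (illustrative, matching the [v]pmuludq work this
 * change was checked in for): each 64-bit destination lane is the full
 * unsigned product of the low 32 bits of the corresponding source lanes; the
 * high halves of the sources are ignored.  128-bit form shown.
 */
static void vpmuludqRefSketch(uint64_t auDst[2], uint64_t const auSrc1[2], uint64_t const auSrc2[2])
{
    auDst[0] = (uint64_t)(uint32_t)auSrc1[0] * (uint32_t)auSrc2[0];
    auDst[1] = (uint64_t)(uint32_t)auSrc1[1] * (uint32_t)auSrc2[1];
}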
4486
4487
4488/* Opcode VEX.F2.0F 0xf4 - invalid */
4489
4490/* Opcode VEX.0F 0xf5 - invalid */
4491/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
4492FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
4493/* Opcode VEX.F2.0F 0xf5 - invalid */
4494
4495/* Opcode VEX.0F 0xf6 - invalid */
4496
4497
4498/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
4499FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
4500{
4501 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4502 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
4503 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4504}
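
/*
 * Reference semantics sketch (illustrative): VPSADBW sums the absolute byte
 * differences of an 8-byte group into the low word of the corresponding
 * 64-bit result lane, zeroing the upper bits; one group shown.
 */
static uint64_t vpsadbwRefSketch(uint8_t const abSrc1[8], uint8_t const abSrc2[8])
{
    uint32_t uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += abSrc1[i] >= abSrc2[i] ? (uint32_t)(abSrc1[i] - abSrc2[i])
                                       : (uint32_t)(abSrc2[i] - abSrc1[i]);
    return uSum; /* bits 16 through 63 of the lane are zero */
}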
4505
4506
4507/* Opcode VEX.F2.0F 0xf6 - invalid */
4508
4509/* Opcode VEX.0F 0xf7 - invalid */
4510/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
4511FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
4512/* Opcode VEX.F2.0F 0xf7 - invalid */
4513
4514/* Opcode VEX.0F 0xf8 - invalid */
4515
4516
 4517/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
4518FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
4519{
4520 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4521 IEMOPMEDIAF3_INIT_VARS( vpsubb);
4522 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4523}
4524
4525
4526/* Opcode VEX.F2.0F 0xf8 - invalid */
4527
4528/* Opcode VEX.0F 0xf9 - invalid */
4529
4530
4531/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
4532FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
4533{
4534 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4535 IEMOPMEDIAF3_INIT_VARS( vpsubw);
4536 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4537}
4538
4539
4540/* Opcode VEX.F2.0F 0xf9 - invalid */
4541
4542/* Opcode VEX.0F 0xfa - invalid */
4543
4544
4545/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
4546FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
4547{
4548 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4549 IEMOPMEDIAF3_INIT_VARS( vpsubd);
4550 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4551}
4552
4553
4554/* Opcode VEX.F2.0F 0xfa - invalid */
4555
4556/* Opcode VEX.0F 0xfb - invalid */
4557
4558
 4559/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
4560FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
4561{
4562 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4563 IEMOPMEDIAF3_INIT_VARS( vpsubq);
4564 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4565}
4566
4567
4568/* Opcode VEX.F2.0F 0xfb - invalid */
4569
4570/* Opcode VEX.0F 0xfc - invalid */
4571
4572
4573/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
4574FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
4575{
4576 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4577 IEMOPMEDIAF3_INIT_VARS( vpaddb);
4578 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4579}
4580
4581
4582/* Opcode VEX.F2.0F 0xfc - invalid */
4583
4584/* Opcode VEX.0F 0xfd - invalid */
4585
4586
4587/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
4588FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
4589{
4590 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4591 IEMOPMEDIAF3_INIT_VARS( vpaddw);
4592 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4593}
4594
4595
4596/* Opcode VEX.F2.0F 0xfd - invalid */
4597
4598/* Opcode VEX.0F 0xfe - invalid */
4599
4600
 4601/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
4602FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
4603{
4604 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4605 IEMOPMEDIAF3_INIT_VARS( vpaddd);
4606 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4607}
4608
4609
4610/* Opcode VEX.F2.0F 0xfe - invalid */
4611
4612
4613/** Opcode **** 0x0f 0xff - UD0 */
4614FNIEMOP_DEF(iemOp_vud0)
4615{
4616 IEMOP_MNEMONIC(vud0, "vud0");
4617 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
4618 {
4619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
4620#ifndef TST_IEM_CHECK_MC
4621 RTGCPTR GCPtrEff;
4622 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
4623 if (rcStrict != VINF_SUCCESS)
4624 return rcStrict;
4625#endif
4626 IEMOP_HLP_DONE_DECODING();
4627 }
4628 return IEMOP_RAISE_INVALID_OPCODE();
4629}
4630
4631
4632
4633/**
4634 * VEX opcode map \#1.
4635 *
4636 * @sa g_apfnTwoByteMap
4637 */
4638IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
4639{
 4640 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
4641 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
4642 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
4643 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
4644 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
4645 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
4646 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
4647 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
4648 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
4649 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
4650 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
4651 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
4652 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
4653 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
4654 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
4655 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
4656 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
4657
4658 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
4659 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
4660 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
4661 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4662 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4663 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4664 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
4665 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4666 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
4667 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
4668 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
4669 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
4670 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
4671 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
4672 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
4673 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
4674
4675 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
4676 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
4677 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
4678 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
4679 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
4680 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
4681 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
4682 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
4683 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4684 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4685 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
4686 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4687 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
4688 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
4689 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4690 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4691
4692 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
4693 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
4694 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
4695 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
4696 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
4697 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
4698 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
4699 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
4700 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4701 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4702 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4703 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4704 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4705 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4706 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4707 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4708
4709 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
4710 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
4711 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
4712 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
4713 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
4714 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
4715 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
4716 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
4717 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
4718 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
4719 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
4720 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
4721 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
4722 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
4723 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
4724 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
4725
4726 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4727 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
4728 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4729 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4730 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4731 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4732 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4733 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4734 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
4735 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
4736 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
4737 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
4738 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
4739 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
4740 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
4741 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
4742
4743 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4744 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4745 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4746 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4747 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4748 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4749 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4750 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4751 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4752 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4753 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4754 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4755 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4756 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4757 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4758 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
4759
4760 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
4761 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4762 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4763 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4764 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4765 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4766 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4767 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4768 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
4769 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
4770 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
4771 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
4772 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
4773 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
4774 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
4775 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
4776
4777 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
4778 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
4779 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
4780 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
4781 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
4782 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
4783 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
4784 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
4785 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
4786 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4787 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4788 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4789 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4790 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4791 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4792 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4793
4794 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4795 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4796 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4797 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4798 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4799 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4800 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4801 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4802 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4803 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4804 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4805 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4806 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4807 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4808 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4809 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4810
4811 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4812 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4813 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4814 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4815 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4816 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4817 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4818 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4819 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4820 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4821 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4822 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4823 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4824 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4825 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4826 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4827
4828 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4829 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4830 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4831 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4832 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4833 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4834 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4835 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4836 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4837 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4838 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4839 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4840 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4841 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4842 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4843 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4844
4845 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4846 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4847 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4848 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4849 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4850 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4851 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
4852 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4853 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4854 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4855 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4856 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4857 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4858 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4859 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4860 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4861
4862 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4863 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4864 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4865 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4866 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4867 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4868 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4869 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4870 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4871 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4872 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4873 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4874 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4875 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4876 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4877 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4878
4879 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4880 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4881 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4882 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4883 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4884 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4885 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4886 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4887 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4888 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4889 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4890 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4891 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4892 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4893 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4894 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4895
4896 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4897 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4898 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4899 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4900 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4901 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4902 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4903 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4904 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4905 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4906 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4907 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4908 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4909 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4910 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4911 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4912};
4913AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
4914/** @} */
4915