VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@96119

Last change on this file since 96119 was 96109, checked in by vboxsync, 3 years ago

VMM/IEM: Implement [v]unpck{l,h}p{s,d} instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 175.6 KB
1/* $Id: IEMAllInstructionsVexMap1.cpp.h 96109 2022-08-08 11:41:33Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX2 instructions on the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if (IEM_IS_MODRM_REG_MODE(bRm))
37 {
38 /*
39 * Register, register.
40 */
41 if (pVCpu->iem.s.uVexLength)
42 {
43 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
64 IEM_MC_BEGIN(4, 0);
65 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
66 IEM_MC_ARG(PRTUINT128U, puDst, 1);
67 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
68 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
69 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
70 IEM_MC_PREPARE_AVX_USAGE();
71 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
72 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
73 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
74 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
75 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_ADVANCE_RIP();
77 IEM_MC_END();
78 }
79 }
80 else
81 {
82 /*
83 * Register, memory.
84 */
85 if (pVCpu->iem.s.uVexLength)
86 {
87 IEM_MC_BEGIN(4, 4);
88 IEM_MC_LOCAL(RTUINT256U, uDst);
89 IEM_MC_LOCAL(RTUINT256U, uSrc1);
90 IEM_MC_LOCAL(RTUINT256U, uSrc2);
91 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
92 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
93 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
95 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
96
97 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
98 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
99 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
100 IEM_MC_PREPARE_AVX_USAGE();
101
102 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
103 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
104 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
105 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
106
107 IEM_MC_ADVANCE_RIP();
108 IEM_MC_END();
109 }
110 else
111 {
112 IEM_MC_BEGIN(4, 2);
113 IEM_MC_LOCAL(RTUINT128U, uSrc2);
114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
115 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
116 IEM_MC_ARG(PRTUINT128U, puDst, 1);
117 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
118 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
119
120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
121 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
122 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
123 IEM_MC_PREPARE_AVX_USAGE();
124
125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
126 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
127 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
128 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
129 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
130
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 }
134 }
135 return VINF_SUCCESS;
136}
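
/*
 * Usage sketch for the worker above (illustrative only, not part of this
 * file's opcode tables): full-state workers taking a PCIEMOPMEDIAF3 are
 * normally invoked from an opcode handler via the IEMOPMEDIAF3_INIT_VARS
 * helper.  The vpand wiring below is an assumed example of the pattern:
 *
 * @code
 * FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
 * {
 *     IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *     IEMOPMEDIAF3_INIT_VARS(vpand);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 * }
 * @endcode
 */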
137
138
139/**
140 * Common worker for AVX2 instructions on the forms:
141 * - vpxxx xmm0, xmm1, xmm2/mem128
142 * - vpxxx ymm0, ymm1, ymm2/mem256
143 *
144 * Takes function table for function w/o implicit state parameter.
145 *
146 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
147 */
148FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
149{
150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
151 if (IEM_IS_MODRM_REG_MODE(bRm))
152 {
153 /*
154 * Register, register.
155 */
156 if (pVCpu->iem.s.uVexLength)
157 {
158 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
159 IEM_MC_BEGIN(3, 3);
160 IEM_MC_LOCAL(RTUINT256U, uDst);
161 IEM_MC_LOCAL(RTUINT256U, uSrc1);
162 IEM_MC_LOCAL(RTUINT256U, uSrc2);
163 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
164 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
165 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
166 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
167 IEM_MC_PREPARE_AVX_USAGE();
168 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
169 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
171 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
178 IEM_MC_BEGIN(3, 0);
179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
180 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
181 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
182 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
185 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
186 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
188 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
189 IEM_MC_ADVANCE_RIP();
190 IEM_MC_END();
191 }
192 }
193 else
194 {
195 /*
196 * Register, memory.
197 */
198 if (pVCpu->iem.s.uVexLength)
199 {
200 IEM_MC_BEGIN(3, 4);
201 IEM_MC_LOCAL(RTUINT256U, uDst);
202 IEM_MC_LOCAL(RTUINT256U, uSrc1);
203 IEM_MC_LOCAL(RTUINT256U, uSrc2);
204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
205 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
206 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
207 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
208
209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
210 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
211 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
212 IEM_MC_PREPARE_AVX_USAGE();
213
214 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
215 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
217 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
218
219 IEM_MC_ADVANCE_RIP();
220 IEM_MC_END();
221 }
222 else
223 {
224 IEM_MC_BEGIN(3, 2);
225 IEM_MC_LOCAL(RTUINT128U, uSrc2);
226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
227 IEM_MC_ARG(PRTUINT128U, puDst, 0);
228 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
230
231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
232 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
233 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
234 IEM_MC_PREPARE_AVX_USAGE();
235
236 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
237 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
238 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
240 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
241
242 IEM_MC_ADVANCE_RIP();
243 IEM_MC_END();
244 }
245 }
246 return VINF_SUCCESS;
247}
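
/*
 * Usage sketch: callers of this optimized worker initialize the function
 * table with IEMOPMEDIAOPTF3_INIT_VARS; see iemOp_vunpcklps_Vx_Hx_Wx further
 * down in this file for a live example of the shape:
 *
 * @code
 * IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
 * return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc,
 *                       IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 * @endcode
 */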
248
249
250/**
251 * Common worker for AVX2 instructions on the forms:
252 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
253 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
254 *
255 * The 128-bit memory version of this instruction may elect to skip fetching the
256 * lower 64 bits of the operand. We, however, do not.
257 *
258 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
259 */
260FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
261{
262 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
263}
264
265
266/**
267 * Common worker for AVX2 instructions on the forms:
268 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
269 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
270 *
271 * The 128-bit memory version of this instruction may elect to skip fetching the
272 * higher 64 bits of the operand. We, however, do not.
273 *
274 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
275 */
276FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
277{
278 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
279}
280
281
282/**
283 * Common worker for AVX2 instructions on the forms:
284 * - vpxxx xmm0, xmm1/mem128
285 * - vpxxx ymm0, ymm1/mem256
286 *
287 * Takes function table for function w/o implicit state parameter.
288 *
289 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
290 */
291FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
292{
293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
294 if (IEM_IS_MODRM_REG_MODE(bRm))
295 {
296 /*
297 * Register, register.
298 */
299 if (pVCpu->iem.s.uVexLength)
300 {
301 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
302 IEM_MC_BEGIN(2, 2);
303 IEM_MC_LOCAL(RTUINT256U, uDst);
304 IEM_MC_LOCAL(RTUINT256U, uSrc);
305 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
306 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
307 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
308 IEM_MC_PREPARE_AVX_USAGE();
309 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
318 IEM_MC_BEGIN(2, 0);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
321 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
322 IEM_MC_PREPARE_AVX_USAGE();
323 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
324 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
325 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
326 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
327 IEM_MC_ADVANCE_RIP();
328 IEM_MC_END();
329 }
330 }
331 else
332 {
333 /*
334 * Register, memory.
335 */
336 if (pVCpu->iem.s.uVexLength)
337 {
338 IEM_MC_BEGIN(2, 3);
339 IEM_MC_LOCAL(RTUINT256U, uDst);
340 IEM_MC_LOCAL(RTUINT256U, uSrc);
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
342 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
343 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
344
345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
347 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
348 IEM_MC_PREPARE_AVX_USAGE();
349
350 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
351 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
352 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
353
354 IEM_MC_ADVANCE_RIP();
355 IEM_MC_END();
356 }
357 else
358 {
359 IEM_MC_BEGIN(2, 2);
360 IEM_MC_LOCAL(RTUINT128U, uSrc);
361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
362 IEM_MC_ARG(PRTUINT128U, puDst, 0);
363 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
364
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
366 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
367 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
368 IEM_MC_PREPARE_AVX_USAGE();
369
370 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
371 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
372 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
373 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
374
375 IEM_MC_ADVANCE_RIP();
376 IEM_MC_END();
377 }
378 }
379 return VINF_SUCCESS;
380}
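
/*
 * Usage sketch (hypothetical handler and table names, assuming the usual
 * *_INIT_VARS companion macro exists for IEMOPMEDIAOPTF2): a two-operand
 * instruction would dispatch into the worker above like so:
 *
 * @code
 * FNIEMOP_DEF(iemOp_vxxx_Vx_Wx)
 * {
 *     IEMOPMEDIAOPTF2_INIT_VARS(vxxx);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 * }
 * @endcode
 */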
381
382
383/* Opcode VEX.0F 0x00 - invalid */
384/* Opcode VEX.0F 0x01 - invalid */
385/* Opcode VEX.0F 0x02 - invalid */
386/* Opcode VEX.0F 0x03 - invalid */
387/* Opcode VEX.0F 0x04 - invalid */
388/* Opcode VEX.0F 0x05 - invalid */
389/* Opcode VEX.0F 0x06 - invalid */
390/* Opcode VEX.0F 0x07 - invalid */
391/* Opcode VEX.0F 0x08 - invalid */
392/* Opcode VEX.0F 0x09 - invalid */
393/* Opcode VEX.0F 0x0a - invalid */
394
395/** Opcode VEX.0F 0x0b. */
396FNIEMOP_DEF(iemOp_vud2)
397{
398 IEMOP_MNEMONIC(vud2, "vud2");
399 return IEMOP_RAISE_INVALID_OPCODE();
400}
401
402/* Opcode VEX.0F 0x0c - invalid */
403/* Opcode VEX.0F 0x0d - invalid */
404/* Opcode VEX.0F 0x0e - invalid */
405/* Opcode VEX.0F 0x0f - invalid */
406
407
408/**
409 * @opcode 0x10
410 * @oppfx none
411 * @opcpuid avx
412 * @opgroup og_avx_simdfp_datamove
413 * @opxcpttype 4UA
414 * @optest op1=1 op2=2 -> op1=2
415 * @optest op1=0 op2=-22 -> op1=-22
416 */
417FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
418{
419 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
420 Assert(pVCpu->iem.s.uVexLength <= 1);
421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
422 if (IEM_IS_MODRM_REG_MODE(bRm))
423 {
424 /*
425 * Register, register.
426 */
427 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
428 IEM_MC_BEGIN(0, 0);
429 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
430 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
431 if (pVCpu->iem.s.uVexLength == 0)
432 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
433 IEM_GET_MODRM_RM(pVCpu, bRm));
434 else
435 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
436 IEM_GET_MODRM_RM(pVCpu, bRm));
437 IEM_MC_ADVANCE_RIP();
438 IEM_MC_END();
439 }
440 else if (pVCpu->iem.s.uVexLength == 0)
441 {
442 /*
443 * 128-bit: Register, Memory
444 */
445 IEM_MC_BEGIN(0, 2);
446 IEM_MC_LOCAL(RTUINT128U, uSrc);
447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
448
449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
450 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
451 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
452 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
453
454 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
455 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
456
457 IEM_MC_ADVANCE_RIP();
458 IEM_MC_END();
459 }
460 else
461 {
462 /*
463 * 256-bit: Register, Memory
464 */
465 IEM_MC_BEGIN(0, 2);
466 IEM_MC_LOCAL(RTUINT256U, uSrc);
467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
468
469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
470 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
471 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
472 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
473
474 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
475 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
476
477 IEM_MC_ADVANCE_RIP();
478 IEM_MC_END();
479 }
480 return VINF_SUCCESS;
481}
482
483
484/**
485 * @opcode 0x10
486 * @oppfx 0x66
487 * @opcpuid avx
488 * @opgroup og_avx_simdfp_datamove
489 * @opxcpttype 4UA
490 * @optest op1=1 op2=2 -> op1=2
491 * @optest op1=0 op2=-22 -> op1=-22
492 */
493FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
494{
495 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
496 Assert(pVCpu->iem.s.uVexLength <= 1);
497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
498 if (IEM_IS_MODRM_REG_MODE(bRm))
499 {
500 /*
501 * Register, register.
502 */
503 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
504 IEM_MC_BEGIN(0, 0);
505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
506 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
507 if (pVCpu->iem.s.uVexLength == 0)
508 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
509 IEM_GET_MODRM_RM(pVCpu, bRm));
510 else
511 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
512 IEM_GET_MODRM_RM(pVCpu, bRm));
513 IEM_MC_ADVANCE_RIP();
514 IEM_MC_END();
515 }
516 else if (pVCpu->iem.s.uVexLength == 0)
517 {
518 /*
519 * 128-bit: Register, memory.
520 */
521 IEM_MC_BEGIN(0, 2);
522 IEM_MC_LOCAL(RTUINT128U, uSrc);
523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
524
525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
526 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
527 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
528 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
529
530 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
531 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
532
533 IEM_MC_ADVANCE_RIP();
534 IEM_MC_END();
535 }
536 else
537 {
538 /*
539 * 256-bit: Register, memory.
540 */
541 IEM_MC_BEGIN(0, 2);
542 IEM_MC_LOCAL(RTUINT256U, uSrc);
543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
544
545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
549
550 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
551 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
552
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 }
556 return VINF_SUCCESS;
557}
558
559
560FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
561{
562 Assert(pVCpu->iem.s.uVexLength <= 1);
563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
564 if (IEM_IS_MODRM_REG_MODE(bRm))
565 {
566 /**
567 * @opcode 0x10
568 * @oppfx 0xf3
569 * @opcodesub 11 mr/reg
570 * @opcpuid avx
571 * @opgroup og_avx_simdfp_datamerge
572 * @opxcpttype 5
573 * @optest op1=1 op2=0 op3=2 -> op1=2
574 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
575 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
576 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
577 * @note HssHi refers to bits 127:32.
578 */
579 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
580 IEMOP_HLP_DONE_VEX_DECODING();
581 IEM_MC_BEGIN(0, 0);
582
583 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
584 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
585 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
586 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
587 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
588 IEM_MC_ADVANCE_RIP();
589 IEM_MC_END();
590 }
591 else
592 {
593 /**
594 * @opdone
595 * @opcode 0x10
596 * @oppfx 0xf3
597 * @opcodesub !11 mr/reg
598 * @opcpuid avx
599 * @opgroup og_avx_simdfp_datamove
600 * @opxcpttype 5
601 * @opfunction iemOp_vmovss_Vss_Hss_Wss
602 * @optest op1=1 op2=2 -> op1=2
603 * @optest op1=0 op2=-22 -> op1=-22
604 */
605 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
606 IEM_MC_BEGIN(0, 2);
607 IEM_MC_LOCAL(uint32_t, uSrc);
608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
609
610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
611 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
612 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
613 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
614
615 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
616 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
617
618 IEM_MC_ADVANCE_RIP();
619 IEM_MC_END();
620 }
621
622 return VINF_SUCCESS;
623}
624
625
626FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
627{
628 Assert(pVCpu->iem.s.uVexLength <= 1);
629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
630 if (IEM_IS_MODRM_REG_MODE(bRm))
631 {
632 /**
633 * @opcode 0x10
634 * @oppfx 0xf2
635 * @opcodesub 11 mr/reg
636 * @opcpuid avx
637 * @opgroup og_avx_simdfp_datamerge
638 * @opxcpttype 5
639 * @optest op1=1 op2=0 op3=2 -> op1=2
640 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
641 * @optest op1=3 op2=-1 op3=0x77 ->
642 * op1=0xffffffffffffffff0000000000000077
643 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
644 */
645 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
646 IEMOP_HLP_DONE_VEX_DECODING();
647 IEM_MC_BEGIN(0, 0);
648
649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
651 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
652 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
653 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
654 IEM_MC_ADVANCE_RIP();
655 IEM_MC_END();
656 }
657 else
658 {
659 /**
660 * @opdone
661 * @opcode 0x10
662 * @oppfx 0xf2
663 * @opcodesub !11 mr/reg
664 * @opcpuid avx
665 * @opgroup og_avx_simdfp_datamove
666 * @opxcpttype 5
667 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
668 * @optest op1=1 op2=2 -> op1=2
669 * @optest op1=0 op2=-22 -> op1=-22
670 */
671 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
672 IEM_MC_BEGIN(0, 2);
673 IEM_MC_LOCAL(uint64_t, uSrc);
674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
675
676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
677 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
679 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
680
681 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
682 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
683
684 IEM_MC_ADVANCE_RIP();
685 IEM_MC_END();
686 }
687
688 return VINF_SUCCESS;
689}
690
691
692/**
693 * @opcode 0x11
694 * @oppfx none
695 * @opcpuid avx
696 * @opgroup og_avx_simdfp_datamove
697 * @opxcpttype 4UA
698 * @optest op1=1 op2=2 -> op1=2
699 * @optest op1=0 op2=-22 -> op1=-22
700 */
701FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
702{
703 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
704 Assert(pVCpu->iem.s.uVexLength <= 1);
705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
706 if (IEM_IS_MODRM_REG_MODE(bRm))
707 {
708 /*
709 * Register, register.
710 */
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
712 IEM_MC_BEGIN(0, 0);
713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
715 if (pVCpu->iem.s.uVexLength == 0)
716 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
717 IEM_GET_MODRM_REG(pVCpu, bRm));
718 else
719 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
720 IEM_GET_MODRM_REG(pVCpu, bRm));
721 IEM_MC_ADVANCE_RIP();
722 IEM_MC_END();
723 }
724 else if (pVCpu->iem.s.uVexLength == 0)
725 {
726 /*
727 * 128-bit: Memory, register.
728 */
729 IEM_MC_BEGIN(0, 2);
730 IEM_MC_LOCAL(RTUINT128U, uSrc);
731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
732
733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
734 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
735 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
736 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
737
738 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
739 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
740
741 IEM_MC_ADVANCE_RIP();
742 IEM_MC_END();
743 }
744 else
745 {
746 /*
747 * 256-bit: Memory, register.
748 */
749 IEM_MC_BEGIN(0, 2);
750 IEM_MC_LOCAL(RTUINT256U, uSrc);
751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
752
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
754 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
755 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
756 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
757
758 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
759 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
760
761 IEM_MC_ADVANCE_RIP();
762 IEM_MC_END();
763 }
764 return VINF_SUCCESS;
765}
766
767
768/**
769 * @opcode 0x11
770 * @oppfx 0x66
771 * @opcpuid avx
772 * @opgroup og_avx_simdfp_datamove
773 * @opxcpttype 4UA
774 * @optest op1=1 op2=2 -> op1=2
775 * @optest op1=0 op2=-22 -> op1=-22
776 */
777FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
778{
779 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
780 Assert(pVCpu->iem.s.uVexLength <= 1);
781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
782 if (IEM_IS_MODRM_REG_MODE(bRm))
783 {
784 /*
785 * Register, register.
786 */
787 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
788 IEM_MC_BEGIN(0, 0);
789 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
790 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
791 if (pVCpu->iem.s.uVexLength == 0)
792 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
793 IEM_GET_MODRM_REG(pVCpu, bRm));
794 else
795 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
796 IEM_GET_MODRM_REG(pVCpu, bRm));
797 IEM_MC_ADVANCE_RIP();
798 IEM_MC_END();
799 }
800 else if (pVCpu->iem.s.uVexLength == 0)
801 {
802 /*
803 * 128-bit: Memory, register.
804 */
805 IEM_MC_BEGIN(0, 2);
806 IEM_MC_LOCAL(RTUINT128U, uSrc);
807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
808
809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
810 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
811 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
812 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
813
814 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
815 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
816
817 IEM_MC_ADVANCE_RIP();
818 IEM_MC_END();
819 }
820 else
821 {
822 /*
823 * 256-bit: Memory, register.
824 */
825 IEM_MC_BEGIN(0, 2);
826 IEM_MC_LOCAL(RTUINT256U, uSrc);
827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
828
829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
832 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
833
834 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
835 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
836
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 }
840 return VINF_SUCCESS;
841}
842
843
844FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
845{
846 Assert(pVCpu->iem.s.uVexLength <= 1);
847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
848 if (IEM_IS_MODRM_REG_MODE(bRm))
849 {
850 /**
851 * @opcode 0x11
852 * @oppfx 0xf3
853 * @opcodesub 11 mr/reg
854 * @opcpuid avx
855 * @opgroup og_avx_simdfp_datamerge
856 * @opxcpttype 5
857 * @optest op1=1 op2=0 op3=2 -> op1=2
858 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
859 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
860 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
861 */
862 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
863 IEMOP_HLP_DONE_VEX_DECODING();
864 IEM_MC_BEGIN(0, 0);
865
866 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
867 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
868 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
869 IEM_GET_MODRM_REG(pVCpu, bRm) /*U32*/,
870 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
871 IEM_MC_ADVANCE_RIP();
872 IEM_MC_END();
873 }
874 else
875 {
876 /**
877 * @opdone
878 * @opcode 0x11
879 * @oppfx 0xf3
880 * @opcodesub !11 mr/reg
881 * @opcpuid avx
882 * @opgroup og_avx_simdfp_datamove
883 * @opxcpttype 5
884 * @opfunction iemOp_vmovss_Wss_Hss_Vss
885 * @optest op1=1 op2=2 -> op1=2
886 * @optest op1=0 op2=-22 -> op1=-22
887 */
888 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
889 IEM_MC_BEGIN(0, 2);
890 IEM_MC_LOCAL(uint32_t, uSrc);
891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
892
893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
894 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
895 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
896 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
897
898 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
899 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
900
901 IEM_MC_ADVANCE_RIP();
902 IEM_MC_END();
903 }
904
905 return VINF_SUCCESS;
906}
907
908
909FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
910{
911 Assert(pVCpu->iem.s.uVexLength <= 1);
912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
913 if (IEM_IS_MODRM_REG_MODE(bRm))
914 {
915 /**
916 * @opcode 0x11
917 * @oppfx 0xf2
918 * @opcodesub 11 mr/reg
919 * @opcpuid avx
920 * @opgroup og_avx_simdfp_datamerge
921 * @opxcpttype 5
922 * @optest op1=1 op2=0 op3=2 -> op1=2
923 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
924 * @optest op1=3 op2=-1 op3=0x77 ->
925 * op1=0xffffffffffffffff0000000000000077
926 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
927 */
928 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
929 IEMOP_HLP_DONE_VEX_DECODING();
930 IEM_MC_BEGIN(0, 0);
931
932 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
933 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
934 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
935 IEM_GET_MODRM_REG(pVCpu, bRm),
936 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
937 IEM_MC_ADVANCE_RIP();
938 IEM_MC_END();
939 }
940 else
941 {
942 /**
943 * @opdone
944 * @opcode 0x11
945 * @oppfx 0xf2
946 * @opcodesub !11 mr/reg
947 * @opcpuid avx
948 * @opgroup og_avx_simdfp_datamove
949 * @opxcpttype 5
950 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
951 * @optest op1=1 op2=2 -> op1=2
952 * @optest op1=0 op2=-22 -> op1=-22
953 */
954 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
955 IEM_MC_BEGIN(0, 2);
956 IEM_MC_LOCAL(uint64_t, uSrc);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958
959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
960 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
961 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
962 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
963
964 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
965 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
966
967 IEM_MC_ADVANCE_RIP();
968 IEM_MC_END();
969 }
970
971 return VINF_SUCCESS;
972}
973
974
975FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
976{
977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
978 if (IEM_IS_MODRM_REG_MODE(bRm))
979 {
980 /**
981 * @opcode 0x12
982 * @opcodesub 11 mr/reg
983 * @oppfx none
984 * @opcpuid avx
985 * @opgroup og_avx_simdfp_datamerge
986 * @opxcpttype 7LZ
987 * @optest op2=0x2200220122022203
988 * op3=0x3304330533063307
989 * -> op1=0x22002201220222033304330533063307
990 * @optest op2=-1 op3=-42 -> op1=-42
991 * @note op3 and op2 are only the 8-byte high XMM register halves.
992 */
993 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
994
995 IEMOP_HLP_DONE_VEX_DECODING_L0();
996 IEM_MC_BEGIN(0, 0);
997
998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
999 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1000 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1001 IEM_GET_MODRM_RM(pVCpu, bRm),
1002 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1003
1004 IEM_MC_ADVANCE_RIP();
1005 IEM_MC_END();
1006 }
1007 else
1008 {
1009 /**
1010 * @opdone
1011 * @opcode 0x12
1012 * @opcodesub !11 mr/reg
1013 * @oppfx none
1014 * @opcpuid avx
1015 * @opgroup og_avx_simdfp_datamove
1016 * @opxcpttype 5LZ
1017 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1018 * @optest op1=1 op2=0 op3=0 -> op1=0
1019 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1020 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1021 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1022 */
1023 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1024
1025 IEM_MC_BEGIN(0, 2);
1026 IEM_MC_LOCAL(uint64_t, uSrc);
1027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1028
1029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1030 IEMOP_HLP_DONE_VEX_DECODING_L0();
1031 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1032 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1033
1034 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1035 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1036 uSrc,
1037 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1038
1039 IEM_MC_ADVANCE_RIP();
1040 IEM_MC_END();
1041 }
1042 return VINF_SUCCESS;
1043}
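
/*
 * Semantics sketch for the two encodings above (plain C, reference only;
 * variable names are illustrative):
 *
 * @code
 * // VMOVHLPS (register form):
 * uDst.au64[0] = uSrc2.au64[1];    // low qword  <- high half of U (reg operand)
 * uDst.au64[1] = uHsrc.au64[1];    // high qword <- high half of H (VEX.vvvv)
 * // VMOVLPS (memory form):
 * uDst.au64[0] = uMem64;           // low qword  <- 64-bit memory operand
 * uDst.au64[1] = uHsrc.au64[1];    // high qword <- high half of H (VEX.vvvv)
 * // Bits 255:128 of the destination YMM register are zeroed in both cases.
 * @endcode
 */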
1044
1045
1046/**
1047 * @opcode 0x12
1048 * @opcodesub !11 mr/reg
1049 * @oppfx 0x66
1050 * @opcpuid avx
1051 * @opgroup og_avx_pcksclr_datamerge
1052 * @opxcpttype 5LZ
1053 * @optest op2=0 op3=2 -> op1=2
1054 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1055 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1056 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1057 */
1058FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1059{
1060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1061 if (IEM_IS_MODRM_MEM_MODE(bRm))
1062 {
1063 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1064
1065 IEM_MC_BEGIN(0, 2);
1066 IEM_MC_LOCAL(uint64_t, uSrc);
1067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1068
1069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1070 IEMOP_HLP_DONE_VEX_DECODING_L0();
1071 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1072 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1073
1074 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1075 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1076 uSrc,
1077 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1078
1079 IEM_MC_ADVANCE_RIP();
1080 IEM_MC_END();
1081 return VINF_SUCCESS;
1082 }
1083
1084 /**
1085 * @opdone
1086 * @opmnemonic udvex660f12m3
1087 * @opcode 0x12
1088 * @opcodesub 11 mr/reg
1089 * @oppfx 0x66
1090 * @opunused immediate
1091 * @opcpuid avx
1092 * @optest ->
1093 */
1094 return IEMOP_RAISE_INVALID_OPCODE();
1095}
1096
1097
1098/**
1099 * @opcode 0x12
1100 * @oppfx 0xf3
1101 * @opcpuid avx
1102 * @opgroup og_avx_pcksclr_datamove
1103 * @opxcpttype 4
1104 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1105 * -> op1=0x00000002000000020000000100000001
1106 * @optest vex.l==1 /
1107 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1108 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1109 */
1110FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1111{
1112 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1113 Assert(pVCpu->iem.s.uVexLength <= 1);
1114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1115 if (IEM_IS_MODRM_REG_MODE(bRm))
1116 {
1117 /*
1118 * Register, register.
1119 */
1120 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1121 if (pVCpu->iem.s.uVexLength == 0)
1122 {
1123 IEM_MC_BEGIN(2, 0);
1124 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1125 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1126
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_PREPARE_AVX_USAGE();
1129
1130 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1131 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1132 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1133 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1134
1135 IEM_MC_ADVANCE_RIP();
1136 IEM_MC_END();
1137 }
1138 else
1139 {
1140 IEM_MC_BEGIN(3, 0);
1141 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1142 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1143 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1144
1145 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1146 IEM_MC_PREPARE_AVX_USAGE();
1147 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1148
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 }
1152 }
1153 else
1154 {
1155 /*
1156 * Register, memory.
1157 */
1158 if (pVCpu->iem.s.uVexLength == 0)
1159 {
1160 IEM_MC_BEGIN(2, 2);
1161 IEM_MC_LOCAL(RTUINT128U, uSrc);
1162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1163 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1164 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1165
1166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1167 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1168 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1169 IEM_MC_PREPARE_AVX_USAGE();
1170
1171 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1172 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1173 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1174 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 IEM_MC_BEGIN(3, 2);
1182 IEM_MC_LOCAL(RTUINT256U, uSrc);
1183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1184 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1185 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1186 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1190 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1191 IEM_MC_PREPARE_AVX_USAGE();
1192
1193 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 }
1200 return VINF_SUCCESS;
1201}
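
/*
 * Semantics sketch (matches the @optest values above): vmovsldup duplicates
 * the low dword of each qword lane of the source:
 *
 * @code
 * for (unsigned i = 0; i < cDwords; i += 2)              // cDwords = 4 or 8
 *     puDst->au32[i] = puDst->au32[i + 1] = puSrc->au32[i];
 * @endcode
 */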
1202
1203
1204/**
1205 * @opcode 0x12
1206 * @oppfx 0xf2
1207 * @opcpuid avx
1208 * @opgroup og_avx_pcksclr_datamove
1209 * @opxcpttype 5
1210 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1211 * -> op1=0x22222222111111112222222211111111
1212 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1213 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1214 */
1215FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1216{
1217 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1219 if (IEM_IS_MODRM_REG_MODE(bRm))
1220 {
1221 /*
1222 * Register, register.
1223 */
1224 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1225 if (pVCpu->iem.s.uVexLength == 0)
1226 {
1227 IEM_MC_BEGIN(2, 0);
1228 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1229 IEM_MC_ARG(uint64_t, uSrc, 1);
1230
1231 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1232 IEM_MC_PREPARE_AVX_USAGE();
1233
1234 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1235 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1236 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1237 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1238
1239 IEM_MC_ADVANCE_RIP();
1240 IEM_MC_END();
1241 }
1242 else
1243 {
1244 IEM_MC_BEGIN(3, 0);
1245 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1246 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1247 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1248
1249 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1250 IEM_MC_PREPARE_AVX_USAGE();
1251 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1252
1253 IEM_MC_ADVANCE_RIP();
1254 IEM_MC_END();
1255 }
1256 }
1257 else
1258 {
1259 /*
1260 * Register, memory.
1261 */
1262 if (pVCpu->iem.s.uVexLength == 0)
1263 {
1264 IEM_MC_BEGIN(2, 2);
1265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1266 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1267 IEM_MC_ARG(uint64_t, uSrc, 1);
1268
1269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1272 IEM_MC_PREPARE_AVX_USAGE();
1273
1274 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1275 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1276 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1277 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1278
1279 IEM_MC_ADVANCE_RIP();
1280 IEM_MC_END();
1281 }
1282 else
1283 {
1284 IEM_MC_BEGIN(3, 2);
1285 IEM_MC_LOCAL(RTUINT256U, uSrc);
1286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1287 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1288 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1289 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1290
1291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1292 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1293 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1294 IEM_MC_PREPARE_AVX_USAGE();
1295
1296 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1297 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1298
1299 IEM_MC_ADVANCE_RIP();
1300 IEM_MC_END();
1301 }
1302 }
1303 return VINF_SUCCESS;
1304}
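
/*
 * Semantics sketch (cf. the @optest values above): vmovddup duplicates the
 * low qword of each 128-bit lane of the source:
 *
 * @code
 * puDst->au64[0] = puDst->au64[1] = puSrc->au64[0];
 * if (cbOperand == 32)  // VEX.256 form
 *     puDst->au64[2] = puDst->au64[3] = puSrc->au64[2];
 * @endcode
 */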
1305
1306
1307/**
1308 * @opcode 0x13
1309 * @opcodesub !11 mr/reg
1310 * @oppfx none
1311 * @opcpuid avx
1312 * @opgroup og_avx_simdfp_datamove
1313 * @opxcpttype 5
1314 * @optest op1=1 op2=2 -> op1=2
1315 * @optest op1=0 op2=-42 -> op1=-42
1316 */
1317FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1318{
1319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1320 if (IEM_IS_MODRM_MEM_MODE(bRm))
1321 {
1322 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1323
1324 IEM_MC_BEGIN(0, 2);
1325 IEM_MC_LOCAL(uint64_t, uSrc);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327
1328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1329 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1332
1333 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1334 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1335
1336 IEM_MC_ADVANCE_RIP();
1337 IEM_MC_END();
1338 return VINF_SUCCESS;
1339 }
1340
1341 /**
1342 * @opdone
1343 * @opmnemonic udvex0f13m3
1344 * @opcode 0x13
1345 * @opcodesub 11 mr/reg
1346 * @oppfx none
1347 * @opunused immediate
1348 * @opcpuid avx
1349 * @optest ->
1350 */
1351 return IEMOP_RAISE_INVALID_OPCODE();
1352}
1353
1354
1355/**
1356 * @opcode 0x13
1357 * @opcodesub !11 mr/reg
1358 * @oppfx 0x66
1359 * @opcpuid avx
1360 * @opgroup og_avx_pcksclr_datamove
1361 * @opxcpttype 5
1362 * @optest op1=1 op2=2 -> op1=2
1363 * @optest op1=0 op2=-42 -> op1=-42
1364 */
1365FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1366{
1367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1368 if (IEM_IS_MODRM_MEM_MODE(bRm))
1369 {
1370 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1371 IEM_MC_BEGIN(0, 2);
1372 IEM_MC_LOCAL(uint64_t, uSrc);
1373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1374
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1377 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1378 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1379
1380 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1381 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1382
1383 IEM_MC_ADVANCE_RIP();
1384 IEM_MC_END();
1385 return VINF_SUCCESS;
1386 }
1387
1388 /**
1389 * @opdone
1390 * @opmnemonic udvex660f13m3
1391 * @opcode 0x13
1392 * @opcodesub 11 mr/reg
1393 * @oppfx 0x66
1394 * @opunused immediate
1395 * @opcpuid avx
1396 * @optest ->
1397 */
1398 return IEMOP_RAISE_INVALID_OPCODE();
1399}
1400
1401/* Opcode VEX.F3.0F 0x13 - invalid */
1402/* Opcode VEX.F2.0F 0x13 - invalid */
1403
1404/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1405FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1406{
1407 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1408 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1409 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1410}
1411
1412
1413/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1414FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1415{
1416 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1417 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1418 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1419}
1420
1421
1422/* Opcode VEX.F3.0F 0x14 - invalid */
1423/* Opcode VEX.F2.0F 0x14 - invalid */
1424
1425
1426/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1427FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1428{
1429 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1430 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1431 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1432}
1433
1434
1435/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1436FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1437{
1438 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1439 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1440 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1441}
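
/*
 * Semantics sketch for the four vunpck{l,h}p{s,d} handlers above, shown per
 * 128-bit lane (plain C, reference only):
 *
 * @code
 * // vunpcklps                               // vunpckhps
 * uDst.au32[0] = uSrc1.au32[0];              // uDst.au32[0] = uSrc1.au32[2];
 * uDst.au32[1] = uSrc2.au32[0];              // uDst.au32[1] = uSrc2.au32[2];
 * uDst.au32[2] = uSrc1.au32[1];              // uDst.au32[2] = uSrc1.au32[3];
 * uDst.au32[3] = uSrc2.au32[1];              // uDst.au32[3] = uSrc2.au32[3];
 * // vunpcklpd: uDst = {uSrc1.au64[0], uSrc2.au64[0]}
 * // vunpckhpd: uDst = {uSrc1.au64[1], uSrc2.au64[1]}
 * @endcode
 */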
1442
1443
1444/* Opcode VEX.F3.0F 0x15 - invalid */
1445/* Opcode VEX.F2.0F 0x15 - invalid */
1446
1447
1448FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1449{
1450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1451 if (IEM_IS_MODRM_REG_MODE(bRm))
1452 {
1453 /**
1454 * @opcode 0x16
1455 * @opcodesub 11 mr/reg
1456 * @oppfx none
1457 * @opcpuid avx
1458 * @opgroup og_avx_simdfp_datamerge
1459 * @opxcpttype 7LZ
1460 */
1461 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1462
1463 IEMOP_HLP_DONE_VEX_DECODING_L0();
1464 IEM_MC_BEGIN(0, 0);
1465
1466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1467 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1468 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1469 IEM_GET_MODRM_RM(pVCpu, bRm),
1470 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1471
1472 IEM_MC_ADVANCE_RIP();
1473 IEM_MC_END();
1474 }
1475 else
1476 {
1477 /**
1478 * @opdone
1479 * @opcode 0x16
1480 * @opcodesub !11 mr/reg
1481 * @oppfx none
1482 * @opcpuid avx
1483 * @opgroup og_avx_simdfp_datamove
1484 * @opxcpttype 5LZ
1485 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1486 */
1487 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1488
1489 IEM_MC_BEGIN(0, 2);
1490 IEM_MC_LOCAL(uint64_t, uSrc);
1491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1492
1493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1494 IEMOP_HLP_DONE_VEX_DECODING_L0();
1495 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1496 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1497
1498 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1499 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1500 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1501 uSrc);
1502
1503 IEM_MC_ADVANCE_RIP();
1504 IEM_MC_END();
1505 }
1506 return VINF_SUCCESS;
1507}
1508
1509
1510/**
1511 * @opcode 0x16
1512 * @opcodesub !11 mr/reg
1513 * @oppfx 0x66
1514 * @opcpuid avx
1515 * @opgroup og_avx_pcksclr_datamerge
1516 * @opxcpttype 5LZ
1517 */
1518FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1519{
1520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1521 if (IEM_IS_MODRM_MEM_MODE(bRm))
1522 {
1523 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1524
1525 IEM_MC_BEGIN(0, 2);
1526 IEM_MC_LOCAL(uint64_t, uSrc);
1527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1528
1529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1530 IEMOP_HLP_DONE_VEX_DECODING_L0();
1531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1532 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1533
1534 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1535 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1536 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1537 uSrc);
1538
1539 IEM_MC_ADVANCE_RIP();
1540 IEM_MC_END();
1541 return VINF_SUCCESS;
1542 }
1543
1544 /**
1545 * @opdone
1546 * @opmnemonic udvex660f16m3
1547 * @opcode 0x16
1548 * @opcodesub 11 mr/reg
1549 * @oppfx 0x66
1550 * @opunused immediate
1551 * @opcpuid avx
1552 * @optest ->
1553 */
1554 return IEMOP_RAISE_INVALID_OPCODE();
1555}
1556
1557
1558/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1559/**
1560 * @opcode 0x16
1561 * @oppfx 0xf3
1562 * @opcpuid avx
1563 * @opgroup og_avx_pcksclr_datamove
1564 * @opxcpttype 4
1565 */
1566FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1567{
1568 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1569 Assert(pVCpu->iem.s.uVexLength <= 1);
1570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1571 if (IEM_IS_MODRM_REG_MODE(bRm))
1572 {
1573 /*
1574 * Register, register.
1575 */
1576 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1577 if (pVCpu->iem.s.uVexLength == 0)
1578 {
1579 IEM_MC_BEGIN(2, 0);
1580 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1581 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1582
1583 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1584 IEM_MC_PREPARE_AVX_USAGE();
1585
1586 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1587 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1588 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1589 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1590
1591 IEM_MC_ADVANCE_RIP();
1592 IEM_MC_END();
1593 }
1594 else
1595 {
1596 IEM_MC_BEGIN(3, 0);
1597 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1598 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1599 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1600
1601 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1602 IEM_MC_PREPARE_AVX_USAGE();
1603 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);
1604
1605 IEM_MC_ADVANCE_RIP();
1606 IEM_MC_END();
1607 }
1608 }
1609 else
1610 {
1611 /*
1612 * Register, memory.
1613 */
1614 if (pVCpu->iem.s.uVexLength == 0)
1615 {
1616 IEM_MC_BEGIN(2, 2);
1617 IEM_MC_LOCAL(RTUINT128U, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1620 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1621
1622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1623 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1624 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1625 IEM_MC_PREPARE_AVX_USAGE();
1626
1627 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1628 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1629 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1630 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1631
1632 IEM_MC_ADVANCE_RIP();
1633 IEM_MC_END();
1634 }
1635 else
1636 {
1637 IEM_MC_BEGIN(3, 2);
1638 IEM_MC_LOCAL(RTUINT256U, uSrc);
1639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1640 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1641 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1642 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1643
1644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1645 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1646 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1647 IEM_MC_PREPARE_AVX_USAGE();
1648
1649 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1650 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);
1651
1652 IEM_MC_ADVANCE_RIP();
1653 IEM_MC_END();
1654 }
1655 }
1656 return VINF_SUCCESS;
1657}
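
/*
 * Semantics sketch: vmovshdup is the odd-dword counterpart of vmovsldup
 * above, duplicating the high dword of each qword lane of the source:
 *
 * @code
 * for (unsigned i = 0; i < cDwords; i += 2)              // cDwords = 4 or 8
 *     puDst->au32[i] = puDst->au32[i + 1] = puSrc->au32[i + 1];
 * @endcode
 */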
1658
1659
1660/* Opcode VEX.F2.0F 0x16 - invalid */
1661
1662
1663/**
1664 * @opcode 0x17
1665 * @opcodesub !11 mr/reg
1666 * @oppfx none
1667 * @opcpuid avx
1668 * @opgroup og_avx_simdfp_datamove
1669 * @opxcpttype 5
1670 */
1671FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1672{
1673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1674 if (IEM_IS_MODRM_MEM_MODE(bRm))
1675 {
1676 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1677
1678 IEM_MC_BEGIN(0, 2);
1679 IEM_MC_LOCAL(uint64_t, uSrc);
1680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1681
1682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1683 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1684 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1685 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1686
1687 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1688 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1689
1690 IEM_MC_ADVANCE_RIP();
1691 IEM_MC_END();
1692 return VINF_SUCCESS;
1693 }
1694
1695 /**
1696 * @opdone
1697 * @opmnemonic udvex0f17m3
1698 * @opcode 0x17
1699 * @opcodesub 11 mr/reg
1700 * @oppfx none
1701 * @opunused immediate
1702 * @opcpuid avx
1703 * @optest ->
1704 */
1705 return IEMOP_RAISE_INVALID_OPCODE();
1706}
1707
1708
1709/**
1710 * @opcode 0x17
1711 * @opcodesub !11 mr/reg
1712 * @oppfx 0x66
1713 * @opcpuid avx
1714 * @opgroup og_avx_pcksclr_datamove
1715 * @opxcpttype 5
1716 */
1717FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1718{
1719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1720 if (IEM_IS_MODRM_MEM_MODE(bRm))
1721 {
1722 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1723 IEM_MC_BEGIN(0, 2);
1724 IEM_MC_LOCAL(uint64_t, uSrc);
1725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1726
1727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1728 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1729 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1730 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1731
1732 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1733 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1734
1735 IEM_MC_ADVANCE_RIP();
1736 IEM_MC_END();
1737 return VINF_SUCCESS;
1738 }
1739
1740 /**
1741 * @opdone
1742 * @opmnemonic udvex660f17m3
1743 * @opcode 0x17
1744 * @opcodesub 11 mr/reg
1745 * @oppfx 0x66
1746 * @opunused immediate
1747 * @opcpuid avx
1748 * @optest ->
1749 */
1750 return IEMOP_RAISE_INVALID_OPCODE();
1751}
1752
1753
1754/* Opcode VEX.F3.0F 0x17 - invalid */
1755/* Opcode VEX.F2.0F 0x17 - invalid */
1756
1757
1758/* Opcode VEX.0F 0x18 - invalid */
1759/* Opcode VEX.0F 0x19 - invalid */
1760/* Opcode VEX.0F 0x1a - invalid */
1761/* Opcode VEX.0F 0x1b - invalid */
1762/* Opcode VEX.0F 0x1c - invalid */
1763/* Opcode VEX.0F 0x1d - invalid */
1764/* Opcode VEX.0F 0x1e - invalid */
1765/* Opcode VEX.0F 0x1f - invalid */
1766
1767/* Opcode VEX.0F 0x20 - invalid */
1768/* Opcode VEX.0F 0x21 - invalid */
1769/* Opcode VEX.0F 0x22 - invalid */
1770/* Opcode VEX.0F 0x23 - invalid */
1771/* Opcode VEX.0F 0x24 - invalid */
1772/* Opcode VEX.0F 0x25 - invalid */
1773/* Opcode VEX.0F 0x26 - invalid */
1774/* Opcode VEX.0F 0x27 - invalid */
1775
1776/**
1777 * @opcode 0x28
1778 * @oppfx none
1779 * @opcpuid avx
1780 * @opgroup og_avx_pcksclr_datamove
1781 * @opxcpttype 1
1782 * @optest op1=1 op2=2 -> op1=2
1783 * @optest op1=0 op2=-42 -> op1=-42
1784 * @note Almost identical to vmovapd.
1785 */
1786FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1787{
1788 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 Assert(pVCpu->iem.s.uVexLength <= 1);
1791 if (IEM_IS_MODRM_REG_MODE(bRm))
1792 {
1793 /*
1794 * Register, register.
1795 */
1796 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1797 IEM_MC_BEGIN(1, 0);
1798
1799 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1800 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1801 if (pVCpu->iem.s.uVexLength == 0)
1802 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1803 IEM_GET_MODRM_RM(pVCpu, bRm));
1804 else
1805 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1806 IEM_GET_MODRM_RM(pVCpu, bRm));
1807 IEM_MC_ADVANCE_RIP();
1808 IEM_MC_END();
1809 }
1810 else
1811 {
1812 /*
1813 * Register, memory.
1814 */
1815 if (pVCpu->iem.s.uVexLength == 0)
1816 {
1817 IEM_MC_BEGIN(0, 2);
1818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1819 IEM_MC_LOCAL(RTUINT128U, uSrc);
1820
1821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1822 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1823 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1824 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1825
1826 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1827 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1828
1829 IEM_MC_ADVANCE_RIP();
1830 IEM_MC_END();
1831 }
1832 else
1833 {
1834 IEM_MC_BEGIN(0, 2);
1835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1836 IEM_MC_LOCAL(RTUINT256U, uSrc);
1837
1838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1839 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1840 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1841 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1842
1843 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1844 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1845
1846 IEM_MC_ADVANCE_RIP();
1847 IEM_MC_END();
1848 }
1849 }
1850 return VINF_SUCCESS;
1851}
1852
1853
1854/**
1855 * @opcode 0x28
1856 * @oppfx 66
1857 * @opcpuid avx
1858 * @opgroup og_avx_pcksclr_datamove
1859 * @opxcpttype 1
1860 * @optest op1=1 op2=2 -> op1=2
1861 * @optest op1=0 op2=-42 -> op1=-42
1862 * @note Almost identical to vmovaps.
1863 */
1864FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1865{
1866 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1868 Assert(pVCpu->iem.s.uVexLength <= 1);
1869 if (IEM_IS_MODRM_REG_MODE(bRm))
1870 {
1871 /*
1872 * Register, register.
1873 */
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1875 IEM_MC_BEGIN(1, 0);
1876
1877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1878 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1879 if (pVCpu->iem.s.uVexLength == 0)
1880 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1881 IEM_GET_MODRM_RM(pVCpu, bRm));
1882 else
1883 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1884 IEM_GET_MODRM_RM(pVCpu, bRm));
1885 IEM_MC_ADVANCE_RIP();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 /*
1891 * Register, memory.
1892 */
1893 if (pVCpu->iem.s.uVexLength == 0)
1894 {
1895 IEM_MC_BEGIN(0, 2);
1896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1897 IEM_MC_LOCAL(RTUINT128U, uSrc);
1898
1899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1900 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1901 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1902 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1903
1904 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1905 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1906
1907 IEM_MC_ADVANCE_RIP();
1908 IEM_MC_END();
1909 }
1910 else
1911 {
1912 IEM_MC_BEGIN(0, 2);
1913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1914 IEM_MC_LOCAL(RTUINT256U, uSrc);
1915
1916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1917 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1918 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1919 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1920
1921 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1922 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1923
1924 IEM_MC_ADVANCE_RIP();
1925 IEM_MC_END();
1926 }
1927 }
1928 return VINF_SUCCESS;
1929}
1930
1931/**
1932 * @opmnemonic udvexf30f28
1933 * @opcode 0x28
1934 * @oppfx 0xf3
1935 * @opunused vex.modrm
1936 * @opcpuid avx
1937 * @optest ->
1938 * @opdone
1939 */
1940
1941/**
1942 * @opmnemonic udvexf20f28
1943 * @opcode 0x28
1944 * @oppfx 0xf2
1945 * @opunused vex.modrm
1946 * @opcpuid avx
1947 * @optest ->
1948 * @opdone
1949 */
1950
1951/**
1952 * @opcode 0x29
1953 * @oppfx none
1954 * @opcpuid avx
1955 * @opgroup og_avx_pcksclr_datamove
1956 * @opxcpttype 1
1957 * @optest op1=1 op2=2 -> op1=2
1958 * @optest op1=0 op2=-42 -> op1=-42
1959 * @note Almost identical to vmovapd.
1960 */
1961FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1962{
1963 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1965 Assert(pVCpu->iem.s.uVexLength <= 1);
1966 if (IEM_IS_MODRM_REG_MODE(bRm))
1967 {
1968 /*
1969 * Register, register.
1970 */
1971 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1972 IEM_MC_BEGIN(1, 0);
1973
1974 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1975 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1976 if (pVCpu->iem.s.uVexLength == 0)
1977 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1978 IEM_GET_MODRM_REG(pVCpu, bRm));
1979 else
1980 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1981 IEM_GET_MODRM_REG(pVCpu, bRm));
1982 IEM_MC_ADVANCE_RIP();
1983 IEM_MC_END();
1984 }
1985 else
1986 {
1987 /*
1988 * Register, memory.
1989 */
1990 if (pVCpu->iem.s.uVexLength == 0)
1991 {
1992 IEM_MC_BEGIN(0, 2);
1993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1994 IEM_MC_LOCAL(RTUINT128U, uSrc);
1995
1996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1997 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1999 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2000
2001 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2002 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2003
2004 IEM_MC_ADVANCE_RIP();
2005 IEM_MC_END();
2006 }
2007 else
2008 {
2009 IEM_MC_BEGIN(0, 2);
2010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2011 IEM_MC_LOCAL(RTUINT256U, uSrc);
2012
2013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2014 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2015 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2016 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2017
2018 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2019 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2020
2021 IEM_MC_ADVANCE_RIP();
2022 IEM_MC_END();
2023 }
2024 }
2025 return VINF_SUCCESS;
2026}
2027
2028/**
2029 * @opcode 0x29
2030 * @oppfx 66
2031 * @opcpuid avx
2032 * @opgroup og_avx_pcksclr_datamove
2033 * @opxcpttype 1
2034 * @optest op1=1 op2=2 -> op1=2
2035 * @optest op1=0 op2=-42 -> op1=-42
2036 * @note Almost identical to vmovaps.
2037 */
2038FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2039{
2040 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2041 Assert(pVCpu->iem.s.uVexLength <= 1);
2042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2043 if (IEM_IS_MODRM_REG_MODE(bRm))
2044 {
2045 /*
2046 * Register, register.
2047 */
2048 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2049 IEM_MC_BEGIN(1, 0);
2050
2051 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2052 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2053 if (pVCpu->iem.s.uVexLength == 0)
2054 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2055 IEM_GET_MODRM_REG(pVCpu, bRm));
2056 else
2057 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2058 IEM_GET_MODRM_REG(pVCpu, bRm));
2059 IEM_MC_ADVANCE_RIP();
2060 IEM_MC_END();
2061 }
2062 else
2063 {
2064 /*
2065 * Register, memory.
2066 */
2067 if (pVCpu->iem.s.uVexLength == 0)
2068 {
2069 IEM_MC_BEGIN(0, 2);
2070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2071 IEM_MC_LOCAL(RTUINT128U, uSrc);
2072
2073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2074 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2075 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2076 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2077
2078 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2079 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2080
2081 IEM_MC_ADVANCE_RIP();
2082 IEM_MC_END();
2083 }
2084 else
2085 {
2086 IEM_MC_BEGIN(0, 2);
2087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2088 IEM_MC_LOCAL(RTUINT256U, uSrc);
2089
2090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2091 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2092 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2093 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2094
2095 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2096 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2097
2098 IEM_MC_ADVANCE_RIP();
2099 IEM_MC_END();
2100 }
2101 }
2102 return VINF_SUCCESS;
2103}
2104
2105
2106/**
2107 * @opmnemonic udvexf30f29
2108 * @opcode 0x29
2109 * @oppfx 0xf3
2110 * @opunused vex.modrm
2111 * @opcpuid avx
2112 * @optest ->
2113 * @opdone
2114 */
2115
2116/**
2117 * @opmnemonic udvexf20f29
2118 * @opcode 0x29
2119 * @oppfx 0xf2
2120 * @opunused vex.modrm
2121 * @opcpuid avx
2122 * @optest ->
2123 * @opdone
2124 */
2125
2126
2127/* Opcode VEX.0F 0x2a - invalid */
2128/* Opcode VEX.66.0F 0x2a - invalid */
2129/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2130FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2131/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2132FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2133
2134
2135/**
2136 * @opcode 0x2b
2137 * @opcodesub !11 mr/reg
2138 * @oppfx none
2139 * @opcpuid avx
2140 * @opgroup og_avx_cachect
2141 * @opxcpttype 1
2142 * @optest op1=1 op2=2 -> op1=2
2143 * @optest op1=0 op2=-42 -> op1=-42
2144 * @note Identical implementation to vmovntpd.
2145 */
2146FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2147{
2148 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2149 Assert(pVCpu->iem.s.uVexLength <= 1);
2150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2151 if (IEM_IS_MODRM_MEM_MODE(bRm))
2152 {
2153 /*
2154 * Memory, register.
2155 */
2156 if (pVCpu->iem.s.uVexLength == 0)
2157 {
2158 IEM_MC_BEGIN(0, 2);
2159 IEM_MC_LOCAL(RTUINT128U, uSrc);
2160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2161
2162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2163 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2164 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2165 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2166
2167 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2168 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2169
2170 IEM_MC_ADVANCE_RIP();
2171 IEM_MC_END();
2172 }
2173 else
2174 {
2175 IEM_MC_BEGIN(0, 2);
2176 IEM_MC_LOCAL(RTUINT256U, uSrc);
2177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2178
2179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2180 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2181 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2182 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2183
2184 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2185 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2186
2187 IEM_MC_ADVANCE_RIP();
2188 IEM_MC_END();
2189 }
2190 }
2191 /* The register, register encoding is invalid. */
2192 else
2193 return IEMOP_RAISE_INVALID_OPCODE();
2194 return VINF_SUCCESS;
2195}
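
/*
 * Note: the non-temporal hint of vmovntps (and of vmovntpd below) is not
 * modelled here; the handlers perform plain aligned stores.  That is
 * architecturally acceptable, as the hint only affects cache allocation
 * behaviour and never the stored value.
 */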
2196
2197/**
2198 * @opcode 0x2b
2199 * @opcodesub !11 mr/reg
2200 * @oppfx 0x66
2201 * @opcpuid avx
2202 * @opgroup og_avx_cachect
2203 * @opxcpttype 1
2204 * @optest op1=1 op2=2 -> op1=2
2205 * @optest op1=0 op2=-42 -> op1=-42
2206 * @note Identical implementation to vmovntps.
2207 */
2208FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2209{
2210 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2211 Assert(pVCpu->iem.s.uVexLength <= 1);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_MEM_MODE(bRm))
2214 {
2215 /*
2216 * Memory, register.
2217 */
2218 if (pVCpu->iem.s.uVexLength == 0)
2219 {
2220 IEM_MC_BEGIN(0, 2);
2221 IEM_MC_LOCAL(RTUINT128U, uSrc);
2222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2223
2224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2225 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2226 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2227 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2228
2229 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2230 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2231
2232 IEM_MC_ADVANCE_RIP();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 IEM_MC_BEGIN(0, 2);
2238 IEM_MC_LOCAL(RTUINT256U, uSrc);
2239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2240
2241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2242 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2243 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2244 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2245
2246 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2247 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2248
2249 IEM_MC_ADVANCE_RIP();
2250 IEM_MC_END();
2251 }
2252 }
2253 /* The register, register encoding is invalid. */
2254 else
2255 return IEMOP_RAISE_INVALID_OPCODE();
2256 return VINF_SUCCESS;
2257}
2258
2259/**
2260 * @opmnemonic udvexf30f2b
2261 * @opcode 0x2b
2262 * @oppfx 0xf3
2263 * @opunused vex.modrm
2264 * @opcpuid avx
2265 * @optest ->
2266 * @opdone
2267 */
2268
2269/**
2270 * @opmnemonic udvexf20f2b
2271 * @opcode 0x2b
2272 * @oppfx 0xf2
2273 * @opunused vex.modrm
2274 * @opcpuid avx
2275 * @optest ->
2276 * @opdone
2277 */
2278
2279
2280/* Opcode VEX.0F 0x2c - invalid */
2281/* Opcode VEX.66.0F 0x2c - invalid */
2282/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2283FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2284/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2285FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2286
2287/* Opcode VEX.0F 0x2d - invalid */
2288/* Opcode VEX.66.0F 0x2d - invalid */
2289/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2290FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2291/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2292FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2293
2294/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2295FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
2296/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2297FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
2298/* Opcode VEX.F3.0F 0x2e - invalid */
2299/* Opcode VEX.F2.0F 0x2e - invalid */
2300
2301/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2302FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2303/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2304FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2305/* Opcode VEX.F3.0F 0x2f - invalid */
2306/* Opcode VEX.F2.0F 0x2f - invalid */
2307
2308/* Opcode VEX.0F 0x30 - invalid */
2309/* Opcode VEX.0F 0x31 - invalid */
2310/* Opcode VEX.0F 0x32 - invalid */
2311/* Opcode VEX.0F 0x33 - invalid */
2312/* Opcode VEX.0F 0x34 - invalid */
2313/* Opcode VEX.0F 0x35 - invalid */
2314/* Opcode VEX.0F 0x36 - invalid */
2315/* Opcode VEX.0F 0x37 - invalid */
2316/* Opcode VEX.0F 0x38 - invalid */
2317/* Opcode VEX.0F 0x39 - invalid */
2318/* Opcode VEX.0F 0x3a - invalid */
2319/* Opcode VEX.0F 0x3b - invalid */
2320/* Opcode VEX.0F 0x3c - invalid */
2321/* Opcode VEX.0F 0x3d - invalid */
2322/* Opcode VEX.0F 0x3e - invalid */
2323/* Opcode VEX.0F 0x3f - invalid */
2324/* Opcode VEX.0F 0x40 - invalid */
2325/* Opcode VEX.0F 0x41 - invalid */
2326/* Opcode VEX.0F 0x42 - invalid */
2327/* Opcode VEX.0F 0x43 - invalid */
2328/* Opcode VEX.0F 0x44 - invalid */
2329/* Opcode VEX.0F 0x45 - invalid */
2330/* Opcode VEX.0F 0x46 - invalid */
2331/* Opcode VEX.0F 0x47 - invalid */
2332/* Opcode VEX.0F 0x48 - invalid */
2333/* Opcode VEX.0F 0x49 - invalid */
2334/* Opcode VEX.0F 0x4a - invalid */
2335/* Opcode VEX.0F 0x4b - invalid */
2336/* Opcode VEX.0F 0x4c - invalid */
2337/* Opcode VEX.0F 0x4d - invalid */
2338/* Opcode VEX.0F 0x4e - invalid */
2339/* Opcode VEX.0F 0x4f - invalid */
2340
2341/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2342FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2343/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2344FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2345/* Opcode VEX.F3.0F 0x50 - invalid */
2346/* Opcode VEX.F2.0F 0x50 - invalid */
2347
2348/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2349FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2350/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2351FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2352/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2353FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2354/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2355FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2356
2357/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2358FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2359/* Opcode VEX.66.0F 0x52 - invalid */
2360/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2361FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2362/* Opcode VEX.F2.0F 0x52 - invalid */
2363
2364/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2365FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2366/* Opcode VEX.66.0F 0x53 - invalid */
2367/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2368FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2369/* Opcode VEX.F2.0F 0x53 - invalid */
2370
2371
2372/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2373FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2374{
2375 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2376 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2377 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2378}
2379
2380
2381/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2382FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2383{
2384 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2385 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2386 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2387}
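
/*
 * Note: vandps/vandpd (and the vandn/vor/vxor forms below) reuse the integer
 * g_iemAImpl_vpand/vpandn/vpor/vpxor workers on purpose: a bitwise operation
 * yields the same bits no matter how the 128/256-bit lanes are typed.
 */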
2388
2389
2390/* Opcode VEX.F3.0F 0x54 - invalid */
2391/* Opcode VEX.F2.0F 0x54 - invalid */
2392
2393
2394/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2395FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2396{
2397 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2398 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2399 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2400}
2401
2402
2403/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2404FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2405{
2406 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2407 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2408 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2409}
2410
2411
2412/* Opcode VEX.F3.0F 0x55 - invalid */
2413/* Opcode VEX.F2.0F 0x55 - invalid */
2414
2415/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2416FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2417{
2418 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2419 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2420 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2421}
2422
2423
2424/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2425FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2426{
2427 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2429 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2430}
2431
2432
2433/* Opcode VEX.F3.0F 0x56 - invalid */
2434/* Opcode VEX.F2.0F 0x56 - invalid */
2435
2436
2437/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2438FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2439{
2440 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2441 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2442 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2443}
2444
2445
2446/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2447FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2448{
2449 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2450 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2451 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2452}
2453
2454
2455/* Opcode VEX.F3.0F 0x57 - invalid */
2456/* Opcode VEX.F2.0F 0x57 - invalid */
2457
2458/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2459FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2460/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2461FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2462/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2463FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2464/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2465FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2466
2467/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2468FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2469/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2470FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2471/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2472FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2473/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2474FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2475
2476/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2477FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2478/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2479FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2480/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2481FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2482/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2483FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2484
2485/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2486FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2487/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2488FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2489/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2490FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2491/* Opcode VEX.F2.0F 0x5b - invalid */
2492
2493/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2494FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2495/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2496FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2497/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2498FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2499/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2500FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2501
2502/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2503FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2504/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2505FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2506/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2507FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2508/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2509FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2510
2511/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2512FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2513/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2514FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2515/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2516FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2517/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2518FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2519
2520/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2521FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2522/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2523FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2524/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2525FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2526/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2527FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2528
2529
2530/* Opcode VEX.0F 0x60 - invalid */
2531
2532
2533/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2534FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2535{
2536 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2537 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2538 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2539}
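
/*
 * Note: the low-unpack family interleaves the low halves of the two sources.
 * Reference sketch of vpunpcklbw per 128-bit lane (illustration only; the
 * actual work is done by the host/fallback workers selected above):
 *
 *     for (unsigned i = 0; i < 8; i++)
 *     {
 *         uDst.au8[2 * i]     = uSrc1.au8[i];   // first source (Hx)
 *         uDst.au8[2 * i + 1] = uSrc2.au8[i];   // second source (Wx)
 *     }
 *
 * The _HighSrc variants at opcodes 0x68..0x6d below do the same with the
 * high halves of each lane.
 */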
2540
2541
2542/* Opcode VEX.F3.0F 0x60 - invalid */
2543
2544
2545/* Opcode VEX.0F 0x61 - invalid */
2546
2547
2548/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2549FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2550{
2551 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2552 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2553 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2554}
2555
2556
2557/* Opcode VEX.F3.0F 0x61 - invalid */
2558
2559
2560/* Opcode VEX.0F 0x62 - invalid */
2561
2562/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2563FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2564{
2565 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2566 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2567 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2568}
2569
2570
2571/* Opcode VEX.F3.0F 0x62 - invalid */
2572
2573
2575/* Opcode VEX.0F 0x63 - invalid */
2576
2577
2578/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2579FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2580{
2581 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2582 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2583 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2584}
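
/*
 * Note: vpacksswb narrows signed words to bytes with signed saturation,
 * taking the words of the first source followed by those of the second
 * (per 128-bit lane); values outside [-128,127] clamp to the nearest bound.
 */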
2585
2586
2587/* Opcode VEX.F3.0F 0x63 - invalid */
2588
2589/* Opcode VEX.0F 0x64 - invalid */
2590
2591
2592/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2593FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2594{
2595 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2596 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2597 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2598}
2599
2600
2601/* Opcode VEX.F3.0F 0x64 - invalid */
2602
2603/* Opcode VEX.0F 0x65 - invalid */
2604
2605
2606/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2607FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2608{
2609 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2610 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2611 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2612}
2613
2614
2615/* Opcode VEX.F3.0F 0x65 - invalid */
2616
2617/* Opcode VEX.0F 0x66 - invalid */
2618
2619
2620/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2621FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2622{
2623 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2624 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
2625 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2626}
2627
2628
2629/* Opcode VEX.F3.0F 0x66 - invalid */
2630
2631/* Opcode VEX.0F 0x67 - invalid */
2632
2633
2634/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
2635FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
2636{
2637 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2638 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
2639 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2640}
2641
2642
2643/* Opcode VEX.F3.0F 0x67 - invalid */
2644
2645
2646///**
2647// * Common worker for SSE2 instructions on the form:
2648// * pxxxx xmm1, xmm2/mem128
2649// *
2650// * The 2nd operand is the second half of a register, which in the memory case
2651// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2652// * where it may read the full 128 bits or only the upper 64 bits.
2653// *
2654// * Exceptions type 4.
2655// */
2656//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2657//{
2658// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2659// if (IEM_IS_MODRM_REG_MODE(bRm))
2660// {
2661// /*
2662// * Register, register.
2663// */
2664// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2665// IEM_MC_BEGIN(2, 0);
2666// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2667// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2668// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2669// IEM_MC_PREPARE_SSE_USAGE();
2670// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2671// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2672// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2673// IEM_MC_ADVANCE_RIP();
2674// IEM_MC_END();
2675// }
2676// else
2677// {
2678// /*
2679// * Register, memory.
2680// */
2681// IEM_MC_BEGIN(2, 2);
2682// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2683// IEM_MC_LOCAL(RTUINT128U, uSrc);
2684// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2685// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2686//
2687// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2688// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2689// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2690// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2691//
2692// IEM_MC_PREPARE_SSE_USAGE();
2693// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2694// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2695//
2696// IEM_MC_ADVANCE_RIP();
2697// IEM_MC_END();
2698// }
2699// return VINF_SUCCESS;
2700//}
2701
2702
2703/* Opcode VEX.0F 0x68 - invalid */
2704
2705/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
2706FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2707{
2708 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2709 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
2710 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2711}
2712
2713
2714/* Opcode VEX.F3.0F 0x68 - invalid */
2715
2716
2717/* Opcode VEX.0F 0x69 - invalid */
2718
2719
2720/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
2721FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2722{
2723 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2724 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
2725 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2726}
2727
2728
2729/* Opcode VEX.F3.0F 0x69 - invalid */
2730
2731
2732/* Opcode VEX.0F 0x6a - invalid */
2733
2734
2735/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
2736FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2737{
2738 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2739 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
2740 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2741}
2742
2743
2744/* Opcode VEX.F3.0F 0x6a - invalid */
2745
2746
2747/* Opcode VEX.0F 0x6b - invalid */
2748
2749
2750/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
2751FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
2752{
2753 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2754 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
2755 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2756}
2757
2758
2759/* Opcode VEX.F3.0F 0x6b - invalid */
2760
2761
2762/* Opcode VEX.0F 0x6c - invalid */
2763
2764
2765/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
2766FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2767{
2768 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2769 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
2770 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2771}
2772
2773
2774/* Opcode VEX.F3.0F 0x6c - invalid */
2775/* Opcode VEX.F2.0F 0x6c - invalid */
2776
2777
2778/* Opcode VEX.0F 0x6d - invalid */
2779
2780
2781/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
2782FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2783{
2784 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2785 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
2786 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2787}
2788
2789
2790/* Opcode VEX.F3.0F 0x6d - invalid */
2791
2792
2793/* Opcode VEX.0F 0x6e - invalid */
2794
2795FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2796{
2797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2798 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2799 {
2800 /**
2801 * @opcode 0x6e
2802 * @opcodesub rex.w=1
2803 * @oppfx 0x66
2804 * @opcpuid avx
2805 * @opgroup og_avx_simdint_datamov
2806 * @opxcpttype 5
2807 * @optest 64-bit / op1=1 op2=2 -> op1=2
2808 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2809 */
2810 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2811 if (IEM_IS_MODRM_REG_MODE(bRm))
2812 {
2813 /* XMM, greg64 */
2814 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817
2818 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2819 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2820
2821 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2822 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2823
2824 IEM_MC_ADVANCE_RIP();
2825 IEM_MC_END();
2826 }
2827 else
2828 {
2829 /* XMM, [mem64] */
2830 IEM_MC_BEGIN(0, 2);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832 IEM_MC_LOCAL(uint64_t, u64Tmp);
2833
2834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2835 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2836 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2837 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2838
2839 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2840 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2841
2842 IEM_MC_ADVANCE_RIP();
2843 IEM_MC_END();
2844 }
2845 }
2846 else
2847 {
2848 /**
2849 * @opdone
2850 * @opcode 0x6e
2851 * @opcodesub rex.w=0
2852 * @oppfx 0x66
2853 * @opcpuid avx
2854 * @opgroup og_avx_simdint_datamov
2855 * @opxcpttype 5
2856 * @opfunction iemOp_vmovd_q_Vy_Ey
2857 * @optest op1=1 op2=2 -> op1=2
2858 * @optest op1=0 op2=-42 -> op1=-42
2859 */
2860 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2861 if (IEM_IS_MODRM_REG_MODE(bRm))
2862 {
2863 /* XMM, greg32 */
2864 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2865 IEM_MC_BEGIN(0, 1);
2866 IEM_MC_LOCAL(uint32_t, u32Tmp);
2867
2868 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2869 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2870
2871 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2872 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 else
2878 {
2879 /* XMM, [mem32] */
2880 IEM_MC_BEGIN(0, 2);
2881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2882 IEM_MC_LOCAL(uint32_t, u32Tmp);
2883
2884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2885 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2886 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2887 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2888
2889 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2890 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2891
2892 IEM_MC_ADVANCE_RIP();
2893 IEM_MC_END();
2894 }
2895 }
2896 return VINF_SUCCESS;
2897}
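
/*
 * Note: the REX.W check above selects between the two SDM encodings sharing
 * this opcode byte: VEX.128.66.0F.W0 6E /r (vmovd xmm, r/m32) and, in 64-bit
 * mode only, VEX.128.66.0F.W1 6E /r (vmovq xmm, r/m64).
 */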
2898
2899
2900/* Opcode VEX.F3.0F 0x6e - invalid */
2901
2902
2903/* Opcode VEX.0F 0x6f - invalid */
2904
2905/**
2906 * @opcode 0x6f
2907 * @oppfx 0x66
2908 * @opcpuid avx
2909 * @opgroup og_avx_simdint_datamove
2910 * @opxcpttype 1
2911 * @optest op1=1 op2=2 -> op1=2
2912 * @optest op1=0 op2=-42 -> op1=-42
2913 */
2914FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2915{
2916 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2917 Assert(pVCpu->iem.s.uVexLength <= 1);
2918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2919 if (IEM_IS_MODRM_REG_MODE(bRm))
2920 {
2921 /*
2922 * Register, register.
2923 */
2924 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2925 IEM_MC_BEGIN(0, 0);
2926
2927 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2928 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2929 if (pVCpu->iem.s.uVexLength == 0)
2930 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2931 IEM_GET_MODRM_RM(pVCpu, bRm));
2932 else
2933 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2934 IEM_GET_MODRM_RM(pVCpu, bRm));
2935 IEM_MC_ADVANCE_RIP();
2936 IEM_MC_END();
2937 }
2938 else if (pVCpu->iem.s.uVexLength == 0)
2939 {
2940 /*
2941 * Register, memory128.
2942 */
2943 IEM_MC_BEGIN(0, 2);
2944 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2946
2947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2948 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2949 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2950 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2951
2952 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2953 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
2954
2955 IEM_MC_ADVANCE_RIP();
2956 IEM_MC_END();
2957 }
2958 else
2959 {
2960 /*
2961 * Register, memory256.
2962 */
2963 IEM_MC_BEGIN(0, 2);
2964 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2966
2967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2971
2972 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2973 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
2974
2975 IEM_MC_ADVANCE_RIP();
2976 IEM_MC_END();
2977 }
2978 return VINF_SUCCESS;
2979}
2980
2981/**
2982 * @opcode 0x6f
2983 * @oppfx 0xf3
2984 * @opcpuid avx
2985 * @opgroup og_avx_simdint_datamove
2986 * @opxcpttype 4UA
2987 * @optest op1=1 op2=2 -> op1=2
2988 * @optest op1=0 op2=-42 -> op1=-42
2989 */
2990FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2991{
2992 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2993 Assert(pVCpu->iem.s.uVexLength <= 1);
2994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2995 if (IEM_IS_MODRM_REG_MODE(bRm))
2996 {
2997 /*
2998 * Register, register.
2999 */
3000 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3001 IEM_MC_BEGIN(0, 0);
3002
3003 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3004 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3005 if (pVCpu->iem.s.uVexLength == 0)
3006 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3007 IEM_GET_MODRM_RM(pVCpu, bRm));
3008 else
3009 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3010 IEM_GET_MODRM_RM(pVCpu, bRm));
3011 IEM_MC_ADVANCE_RIP();
3012 IEM_MC_END();
3013 }
3014 else if (pVCpu->iem.s.uVexLength == 0)
3015 {
3016 /*
3017 * Register, memory128.
3018 */
3019 IEM_MC_BEGIN(0, 2);
3020 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3022
3023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3027
3028 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3029 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3030
3031 IEM_MC_ADVANCE_RIP();
3032 IEM_MC_END();
3033 }
3034 else
3035 {
3036 /*
3037 * Register, memory256.
3038 */
3039 IEM_MC_BEGIN(0, 2);
3040 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3042
3043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3044 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3045 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3046 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3047
3048 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3049 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3050
3051 IEM_MC_ADVANCE_RIP();
3052 IEM_MC_END();
3053 }
3054 return VINF_SUCCESS;
3055}
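
/*
 * Note: the only difference between the vmovdqa and vmovdqu loads above is
 * the fetch: the _ALIGN_SSE/_ALIGN_AVX fetch variants fault on misaligned
 * operands, while the plain U128/U256 fetches used by vmovdqu do not.
 */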
3056
3057
3058/* Opcode VEX.0F 0x70 - invalid */
3059
3060
3061/**
3062 * Common worker for AVX/AVX2 instructions on the forms:
3063 * - vpxxx xmm0, xmm2/mem128, imm8
3064 * - vpxxx ymm0, ymm2/mem256, imm8
3065 *
3066 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3067 */
3068FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3069{
3070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3071 if (IEM_IS_MODRM_REG_MODE(bRm))
3072 {
3073 /*
3074 * Register, register.
3075 */
3076 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3077 if (pVCpu->iem.s.uVexLength)
3078 {
3079 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3080 IEM_MC_BEGIN(3, 2);
3081 IEM_MC_LOCAL(RTUINT256U, uDst);
3082 IEM_MC_LOCAL(RTUINT256U, uSrc);
3083 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3084 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3085 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3086 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3087 IEM_MC_PREPARE_AVX_USAGE();
3088 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3089 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3090 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3091 IEM_MC_ADVANCE_RIP();
3092 IEM_MC_END();
3093 }
3094 else
3095 {
3096 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3097 IEM_MC_BEGIN(3, 0);
3098 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3099 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3100 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3101 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3102 IEM_MC_PREPARE_AVX_USAGE();
3103 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3104 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3105 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3106 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3107 IEM_MC_ADVANCE_RIP();
3108 IEM_MC_END();
3109 }
3110 }
3111 else
3112 {
3113 /*
3114 * Register, memory.
3115 */
3116 if (pVCpu->iem.s.uVexLength)
3117 {
3118 IEM_MC_BEGIN(3, 3);
3119 IEM_MC_LOCAL(RTUINT256U, uDst);
3120 IEM_MC_LOCAL(RTUINT256U, uSrc);
3121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3122 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3123 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3124
3125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3126 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3127 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3128 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3129 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3130 IEM_MC_PREPARE_AVX_USAGE();
3131
3132 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3133 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3134 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3135
3136 IEM_MC_ADVANCE_RIP();
3137 IEM_MC_END();
3138 }
3139 else
3140 {
3141 IEM_MC_BEGIN(3, 1);
3142 IEM_MC_LOCAL(RTUINT128U, uSrc);
3143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3144 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3145 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3146
3147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3148 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3149 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3150 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3151 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3152 IEM_MC_PREPARE_AVX_USAGE();
3153
3154 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3155 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3156 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3157 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3158
3159 IEM_MC_ADVANCE_RIP();
3160 IEM_MC_END();
3161 }
3162 }
3163 return VINF_SUCCESS;
3164}
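
/*
 * Note: reference sketch of what the 128-bit pshufd callee does with the
 * immediate; pshuflw/pshufhw apply the same 2-bit selection to the low/high
 * four words only (sketch only; the real work is done by the pfnU128 and
 * pfnU256 callees):
 *
 *     for (unsigned i = 0; i < 4; i++)
 *         puDst->au32[i] = puSrc->au32[(bEvil >> (i * 2)) & 3];
 */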
3165
3166
3167/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3168FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3169{
3170 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3171 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3172 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3174}
3175
3176
3177/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3178FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3179{
3180 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3181 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3182 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3184}
3185
3186
3187/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3188FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3189{
3190 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3191 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3192 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3193}
3194
3195
3196/* Opcode VEX.0F 0x71 11/2 - invalid. */
3197/** Opcode VEX.66.0F 0x71 11/2. */
3198FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3199
3200/* Opcode VEX.0F 0x71 11/4 - invalid */
3201/** Opcode VEX.66.0F 0x71 11/4. */
3202FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3203
3204/* Opcode VEX.0F 0x71 11/6 - invalid */
3205/** Opcode VEX.66.0F 0x71 11/6. */
3206FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3207
3208
3209/**
3210 * VEX Group 12 jump table for register variant.
3211 */
3212IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3213{
3214 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3215 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3216 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3217 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3218 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3219 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3220 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3221 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3222};
3223AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3224
3225
3226/** Opcode VEX.0F 0x71. */
3227FNIEMOP_DEF(iemOp_VGrp12)
3228{
3229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3230 if (IEM_IS_MODRM_REG_MODE(bRm))
3231 /* register, register */
3232 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3233 + pVCpu->iem.s.idxPrefix], bRm);
3234 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3235}
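
/*
 * Note: the dispatch above indexes the table as modrm.reg * 4 + idxPrefix,
 * where idxPrefix is assumed to encode the SIMD prefix as 0=none, 1=0x66,
 * 2=0xF3, 3=0xF2 (matching the four columns per /r row).  VEX.66.0F 71 /2 ib
 * thus lands on iemOp_VGrp12_vpsrlw_Hx_Ux_Ib.  The VGrp13 and VGrp14
 * dispatchers below use the same scheme.
 */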
3236
3237
3238/* Opcode VEX.0F 0x72 11/2 - invalid. */
3239/** Opcode VEX.66.0F 0x72 11/2. */
3240FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3241
3242/* Opcode VEX.0F 0x72 11/4 - invalid. */
3243/** Opcode VEX.66.0F 0x72 11/4. */
3244FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3245
3246/* Opcode VEX.0F 0x72 11/6 - invalid. */
3247/** Opcode VEX.66.0F 0x72 11/6. */
3248FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3249
3250
3251/**
3252 * VEX Group 13 jump table for register variant.
3253 */
3254IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3255{
3256 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3257 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3258 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3259 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3260 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3261 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3262 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3263 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3264};
3265AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3266
3267/** Opcode VEX.0F 0x72. */
3268FNIEMOP_DEF(iemOp_VGrp13)
3269{
3270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3271 if (IEM_IS_MODRM_REG_MODE(bRm))
3272 /* register, register */
3273 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3274 + pVCpu->iem.s.idxPrefix], bRm);
3275 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3276}
3277
3278
3279/* Opcode VEX.0F 0x73 11/2 - invalid. */
3280/** Opcode VEX.66.0F 0x73 11/2. */
3281FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3282
3283/** Opcode VEX.66.0F 0x73 11/3. */
3284FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3285
3286/* Opcode VEX.0F 0x73 11/6 - invalid. */
3287/** Opcode VEX.66.0F 0x73 11/6. */
3288FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3289
3290/** Opcode VEX.66.0F 0x73 11/7. */
3291FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3292
3293/**
3294 * VEX Group 14 jump table for register variant.
3295 */
3296IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3297{
3298 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3299 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3300 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3301 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3302 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3303 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3304 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3305 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3306};
3307AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3308
3309
3310/** Opcode VEX.0F 0x73. */
3311FNIEMOP_DEF(iemOp_VGrp14)
3312{
3313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3314 if (IEM_IS_MODRM_REG_MODE(bRm))
3315 /* register, register */
3316 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3317 + pVCpu->iem.s.idxPrefix], bRm);
3318 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3319}
3320
3321
3322/* Opcode VEX.0F 0x74 - invalid */
3323
3324
3325/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3326FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3327{
3328 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3329 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3330 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3331}
3332
3333/* Opcode VEX.F3.0F 0x74 - invalid */
3334/* Opcode VEX.F2.0F 0x74 - invalid */
3335
3336
3337/* Opcode VEX.0F 0x75 - invalid */
3338
3339
3340/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3341FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3342{
3343 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3344 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3345 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3346}
3347
3348
3349/* Opcode VEX.F3.0F 0x75 - invalid */
3350/* Opcode VEX.F2.0F 0x75 - invalid */
3351
3352
3353/* Opcode VEX.0F 0x76 - invalid */
3354
3355
3356/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3357FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3358{
3359 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3360 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3361 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3362}
3363
3364
3365/* Opcode VEX.F3.0F 0x76 - invalid */
3366/* Opcode VEX.F2.0F 0x76 - invalid */
3367
3368
3369/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
3370FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
3371/* Opcode VEX.66.0F 0x77 - invalid */
3372/* Opcode VEX.F3.0F 0x77 - invalid */
3373/* Opcode VEX.F2.0F 0x77 - invalid */
3374
3375/* Opcode VEX.0F 0x78 - invalid */
3376/* Opcode VEX.66.0F 0x78 - invalid */
3377/* Opcode VEX.F3.0F 0x78 - invalid */
3378/* Opcode VEX.F2.0F 0x78 - invalid */
3379
3380/* Opcode VEX.0F 0x79 - invalid */
3381/* Opcode VEX.66.0F 0x79 - invalid */
3382/* Opcode VEX.F3.0F 0x79 - invalid */
3383/* Opcode VEX.F2.0F 0x79 - invalid */
3384
3385/* Opcode VEX.0F 0x7a - invalid */
3386/* Opcode VEX.66.0F 0x7a - invalid */
3387/* Opcode VEX.F3.0F 0x7a - invalid */
3388/* Opcode VEX.F2.0F 0x7a - invalid */
3389
3390/* Opcode VEX.0F 0x7b - invalid */
3391/* Opcode VEX.66.0F 0x7b - invalid */
3392/* Opcode VEX.F3.0F 0x7b - invalid */
3393/* Opcode VEX.F2.0F 0x7b - invalid */
3394
3395/* Opcode VEX.0F 0x7c - invalid */
3396/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3397FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3398/* Opcode VEX.F3.0F 0x7c - invalid */
3399/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3400FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3401
3402/* Opcode VEX.0F 0x7d - invalid */
3403/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3404FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3405/* Opcode VEX.F3.0F 0x7d - invalid */
3406/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3407FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3408
3409
3410/* Opcode VEX.0F 0x7e - invalid */
3411
3412FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3413{
3414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3415 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3416 {
3417 /**
3418 * @opcode 0x7e
3419 * @opcodesub rex.w=1
3420 * @oppfx 0x66
3421 * @opcpuid avx
3422 * @opgroup og_avx_simdint_datamov
3423 * @opxcpttype 5
3424 * @optest 64-bit / op1=1 op2=2 -> op1=2
3425 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3426 */
3427 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3428 if (IEM_IS_MODRM_REG_MODE(bRm))
3429 {
3430 /* greg64, XMM */
3431 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3432 IEM_MC_BEGIN(0, 1);
3433 IEM_MC_LOCAL(uint64_t, u64Tmp);
3434
3435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3437
3438 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3439 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3440
3441 IEM_MC_ADVANCE_RIP();
3442 IEM_MC_END();
3443 }
3444 else
3445 {
3446 /* [mem64], XMM */
3447 IEM_MC_BEGIN(0, 2);
3448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3449 IEM_MC_LOCAL(uint64_t, u64Tmp);
3450
3451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3452 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3453 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3454 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3455
3456 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3457 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3458
3459 IEM_MC_ADVANCE_RIP();
3460 IEM_MC_END();
3461 }
3462 }
3463 else
3464 {
3465 /**
3466 * @opdone
3467 * @opcode 0x7e
3468 * @opcodesub rex.w=0
3469 * @oppfx 0x66
3470 * @opcpuid avx
3471 * @opgroup og_avx_simdint_datamov
3472 * @opxcpttype 5
3473 * @opfunction iemOp_vmovd_q_Ey_Vy
3474 * @optest op1=1 op2=2 -> op1=2
3475 * @optest op1=0 op2=-42 -> op1=-42
3476 */
3477 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3478 if (IEM_IS_MODRM_REG_MODE(bRm))
3479 {
3480 /* greg32, XMM */
3481 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3482 IEM_MC_BEGIN(0, 1);
3483 IEM_MC_LOCAL(uint32_t, u32Tmp);
3484
3485 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3486 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3487
3488 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3489 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3490
3491 IEM_MC_ADVANCE_RIP();
3492 IEM_MC_END();
3493 }
3494 else
3495 {
3496 /* [mem32], XMM */
3497 IEM_MC_BEGIN(0, 2);
3498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3499 IEM_MC_LOCAL(uint32_t, u32Tmp);
3500
3501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3502 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3503 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3504 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3505
3506 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3507 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3508
3509 IEM_MC_ADVANCE_RIP();
3510 IEM_MC_END();
3511 }
3512 }
3513 return VINF_SUCCESS;
3514}
3515
3516/**
3517 * @opcode 0x7e
3518 * @oppfx 0xf3
3519 * @opcpuid avx
3520 * @opgroup og_avx_pcksclr_datamove
3521 * @opxcpttype none
3522 * @optest op1=1 op2=2 -> op1=2
3523 * @optest op1=0 op2=-42 -> op1=-42
3524 */
3525FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3526{
3527 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3529 if (IEM_IS_MODRM_REG_MODE(bRm))
3530 {
3531 /*
3532 * Register, register.
3533 */
3534 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3535 IEM_MC_BEGIN(0, 0);
3536
3537 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3539
3540 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3541 IEM_GET_MODRM_RM(pVCpu, bRm));
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 else
3546 {
3547 /*
3548 * Memory, register.
3549 */
3550 IEM_MC_BEGIN(0, 2);
3551 IEM_MC_LOCAL(uint64_t, uSrc);
3552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3553
3554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3555 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3556 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3557 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3558
3559 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3560 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3561
3562 IEM_MC_ADVANCE_RIP();
3563 IEM_MC_END();
3564 }
3565 return VINF_SUCCESS;
3566}
3567
3568/* Opcode VEX.F2.0F 0x7e - invalid */
3569
3570
3571/* Opcode VEX.0F 0x7f - invalid */
3572
3573/**
3574 * @opcode 0x7f
3575 * @oppfx 0x66
3576 * @opcpuid avx
3577 * @opgroup og_avx_simdint_datamove
3578 * @opxcpttype 1
3579 * @optest op1=1 op2=2 -> op1=2
3580 * @optest op1=0 op2=-42 -> op1=-42
3581 */
3582FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3583{
3584 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3585 Assert(pVCpu->iem.s.uVexLength <= 1);
3586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3587 if (IEM_IS_MODRM_REG_MODE(bRm))
3588 {
3589 /*
3590 * Register, register.
3591 */
3592 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3593 IEM_MC_BEGIN(0, 0);
3594
3595 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3596 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3597 if (pVCpu->iem.s.uVexLength == 0)
3598 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3599 IEM_GET_MODRM_REG(pVCpu, bRm));
3600 else
3601 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3602 IEM_GET_MODRM_REG(pVCpu, bRm));
3603 IEM_MC_ADVANCE_RIP();
3604 IEM_MC_END();
3605 }
3606 else if (pVCpu->iem.s.uVexLength == 0)
3607 {
3608 /*
3609 * Register, memory128.
3610 */
3611 IEM_MC_BEGIN(0, 2);
3612 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3614
3615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3616 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3617 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3618 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3619
3620 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3621 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3622
3623 IEM_MC_ADVANCE_RIP();
3624 IEM_MC_END();
3625 }
3626 else
3627 {
3628 /*
3629 * Register, memory256.
3630 */
3631 IEM_MC_BEGIN(0, 2);
3632 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3634
3635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3636 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3637 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3638 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3639
3640 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3641 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3642
3643 IEM_MC_ADVANCE_RIP();
3644 IEM_MC_END();
3645 }
3646 return VINF_SUCCESS;
3647}
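/*
 * Editor's note (illustrative, not from the original file): the _ALIGN_SSE and
 * _ALIGN_AVX store variants above are what separate vmovdqa from vmovdqu. A
 * minimal sketch of the check they imply, assuming cbOperand is 16 for VEX.128
 * and 32 for VEX.256 operation:
 */
#if 0 /* sketch only */
if (GCPtrEffSrc & (cbOperand - 1))
    return iemRaiseGeneralProtectionFault0(pVCpu); /* #GP(0) on a misaligned operand */
#endif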
3648
3649/**
3650 * @opcode 0x7f
3651 * @oppfx 0xf3
3652 * @opcpuid avx
3653 * @opgroup og_avx_simdint_datamove
3654 * @opxcpttype 4UA
3655 * @optest op1=1 op2=2 -> op1=2
3656 * @optest op1=0 op2=-42 -> op1=-42
3657 */
3658FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3659{
3660 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3661 Assert(pVCpu->iem.s.uVexLength <= 1);
3662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3663 if (IEM_IS_MODRM_REG_MODE(bRm))
3664 {
3665 /*
3666 * Register, register.
3667 */
3668 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3669 IEM_MC_BEGIN(0, 0);
3670
3671 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3672 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3673 if (pVCpu->iem.s.uVexLength == 0)
3674 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3675 IEM_GET_MODRM_REG(pVCpu, bRm));
3676 else
3677 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3678 IEM_GET_MODRM_REG(pVCpu, bRm));
3679 IEM_MC_ADVANCE_RIP();
3680 IEM_MC_END();
3681 }
3682 else if (pVCpu->iem.s.uVexLength == 0)
3683 {
3684 /*
3685 * Register, memory128.
3686 */
3687 IEM_MC_BEGIN(0, 2);
3688 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3690
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3695
3696 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3697 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3698
3699 IEM_MC_ADVANCE_RIP();
3700 IEM_MC_END();
3701 }
3702 else
3703 {
3704 /*
3705 * Register, memory256.
3706 */
3707 IEM_MC_BEGIN(0, 2);
3708 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3710
3711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3712 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3715
3716 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3717 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3718
3719 IEM_MC_ADVANCE_RIP();
3720 IEM_MC_END();
3721 }
3722 return VINF_SUCCESS;
3723}
3724
3725/* Opcode VEX.F2.0F 0x7f - invalid */
3726
3727
3728/* Opcode VEX.0F 0x80 - invalid */
3729/* Opcode VEX.0F 0x81 - invalid */
3730/* Opcode VEX.0F 0x82 - invalid */
3731/* Opcode VEX.0F 0x83 - invalid */
3732/* Opcode VEX.0F 0x84 - invalid */
3733/* Opcode VEX.0F 0x85 - invalid */
3734/* Opcode VEX.0F 0x86 - invalid */
3735/* Opcode VEX.0F 0x87 - invalid */
3736/* Opcode VEX.0F 0x88 - invalid */
3737/* Opcode VEX.0F 0x89 - invalid */
3738/* Opcode VEX.0F 0x8a - invalid */
3739/* Opcode VEX.0F 0x8b - invalid */
3740/* Opcode VEX.0F 0x8c - invalid */
3741/* Opcode VEX.0F 0x8d - invalid */
3742/* Opcode VEX.0F 0x8e - invalid */
3743/* Opcode VEX.0F 0x8f - invalid */
3744/* Opcode VEX.0F 0x90 - invalid */
3745/* Opcode VEX.0F 0x91 - invalid */
3746/* Opcode VEX.0F 0x92 - invalid */
3747/* Opcode VEX.0F 0x93 - invalid */
3748/* Opcode VEX.0F 0x94 - invalid */
3749/* Opcode VEX.0F 0x95 - invalid */
3750/* Opcode VEX.0F 0x96 - invalid */
3751/* Opcode VEX.0F 0x97 - invalid */
3752/* Opcode VEX.0F 0x98 - invalid */
3753/* Opcode VEX.0F 0x99 - invalid */
3754/* Opcode VEX.0F 0x9a - invalid */
3755/* Opcode VEX.0F 0x9b - invalid */
3756/* Opcode VEX.0F 0x9c - invalid */
3757/* Opcode VEX.0F 0x9d - invalid */
3758/* Opcode VEX.0F 0x9e - invalid */
3759/* Opcode VEX.0F 0x9f - invalid */
3760/* Opcode VEX.0F 0xa0 - invalid */
3761/* Opcode VEX.0F 0xa1 - invalid */
3762/* Opcode VEX.0F 0xa2 - invalid */
3763/* Opcode VEX.0F 0xa3 - invalid */
3764/* Opcode VEX.0F 0xa4 - invalid */
3765/* Opcode VEX.0F 0xa5 - invalid */
3766/* Opcode VEX.0F 0xa6 - invalid */
3767/* Opcode VEX.0F 0xa7 - invalid */
3768/* Opcode VEX.0F 0xa8 - invalid */
3769/* Opcode VEX.0F 0xa9 - invalid */
3770/* Opcode VEX.0F 0xaa - invalid */
3771/* Opcode VEX.0F 0xab - invalid */
3772/* Opcode VEX.0F 0xac - invalid */
3773/* Opcode VEX.0F 0xad - invalid */
3774
3775
3776/* Opcode VEX.0F 0xae mem/0 - invalid. */
3777/* Opcode VEX.0F 0xae mem/1 - invalid. */
3778
3779/**
3780 * @ opmaps grp15
3781 * @ opcode !11/2
3782 * @ oppfx none
3783 * @ opcpuid sse
3784 * @ opgroup og_sse_mxcsrsm
3785 * @ opxcpttype 5
3786 * @ optest op1=0 -> mxcsr=0
3787 * @ optest op1=0x2083 -> mxcsr=0x2083
3788 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3789 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3790 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3791 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3792 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3793 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3794 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3795 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3796 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3797 */
3798FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3799//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3800//{
3801// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3802// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3803// return IEMOP_RAISE_INVALID_OPCODE();
3804//
3805// IEM_MC_BEGIN(2, 0);
3806// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3807// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3808// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3809// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3810// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3811// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3812// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3813// IEM_MC_END();
3814// return VINF_SUCCESS;
3815//}
3816
3817
3818/**
3819 * @opmaps vexgrp15
3820 * @opcode !11/3
3821 * @oppfx none
3822 * @opcpuid avx
3823 * @opgroup og_avx_mxcsrsm
3824 * @opxcpttype 5
3825 * @optest mxcsr=0 -> op1=0
3826 * @optest mxcsr=0x2083 -> op1=0x2083
3827 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3828 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3829 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3830 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3831 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3832 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3833 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3834 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3835 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3836 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3837 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3838 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3839 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3840 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3841 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3842 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3843 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3844 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3845 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3846 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3847 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3848 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3849 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3850 * -> value.xcpt=0x6
3851 * @remarks The AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
3852 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
3853 * APMv4 rev 3.17 page 509.
3854 * @todo Test this instruction on AMD Ryzen.
3855 */
3856FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3857{
3858 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3859 IEM_MC_BEGIN(2, 0);
3860 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3861 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3863 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3864 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3865 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3866 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3867 IEM_MC_END();
3868 return VINF_SUCCESS;
3869}
3870
3871/* Opcode VEX.0F 0xae mem/4 - invalid. */
3872/* Opcode VEX.0F 0xae mem/5 - invalid. */
3873/* Opcode VEX.0F 0xae mem/6 - invalid. */
3874/* Opcode VEX.0F 0xae mem/7 - invalid. */
3875
3876/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3877/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3878/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3879/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3880/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3881/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3882/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3883/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3884
3885/**
3886 * Vex group 15 jump table for memory variant.
3887 */
3888IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3889{ /* pfx: none, 066h, 0f3h, 0f2h */
3890 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3891 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3892 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3893 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3894 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3895 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3896 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3897 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3898};
3899AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3900
3901
3902/** Opcode VEX.0F 0xae. */
3903FNIEMOP_DEF(iemOp_VGrp15)
3904{
3905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3906 if (IEM_IS_MODRM_REG_MODE(bRm))
3907 /* register, register */
3908 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3909
3910 /* memory, register */
3911 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3912 + pVCpu->iem.s.idxPrefix], bRm);
3913}
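/*
 * Editor's note (worked example, derived from the dispatch above): the table
 * index is reg-field * 4 + mandatory-prefix index, so VEX.0F AE /3 with a
 * memory operand and no prefix resolves to
 *     g_apfnVexGroup15MemReg[3 * 4 + 0] == iemOp_VGrp15_vstmxcsr
 */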
3914
3915
3916/* Opcode VEX.0F 0xaf - invalid. */
3917
3918/* Opcode VEX.0F 0xb0 - invalid. */
3919/* Opcode VEX.0F 0xb1 - invalid. */
3920/* Opcode VEX.0F 0xb2 - invalid. */
3922/* Opcode VEX.0F 0xb3 - invalid. */
3923/* Opcode VEX.0F 0xb4 - invalid. */
3924/* Opcode VEX.0F 0xb5 - invalid. */
3925/* Opcode VEX.0F 0xb6 - invalid. */
3926/* Opcode VEX.0F 0xb7 - invalid. */
3927/* Opcode VEX.0F 0xb8 - invalid. */
3928/* Opcode VEX.0F 0xb9 - invalid. */
3929/* Opcode VEX.0F 0xba - invalid. */
3930/* Opcode VEX.0F 0xbb - invalid. */
3931/* Opcode VEX.0F 0xbc - invalid. */
3932/* Opcode VEX.0F 0xbd - invalid. */
3933/* Opcode VEX.0F 0xbe - invalid. */
3934/* Opcode VEX.0F 0xbf - invalid. */
3935
3936/* Opcode VEX.0F 0xc0 - invalid. */
3937/* Opcode VEX.66.0F 0xc0 - invalid. */
3938/* Opcode VEX.F3.0F 0xc0 - invalid. */
3939/* Opcode VEX.F2.0F 0xc0 - invalid. */
3940
3941/* Opcode VEX.0F 0xc1 - invalid. */
3942/* Opcode VEX.66.0F 0xc1 - invalid. */
3943/* Opcode VEX.F3.0F 0xc1 - invalid. */
3944/* Opcode VEX.F2.0F 0xc1 - invalid. */
3945
3946/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3947FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3948/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3949FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3950/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3951FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3952/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3953FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3954
3955/* Opcode VEX.0F 0xc3 - invalid */
3956/* Opcode VEX.66.0F 0xc3 - invalid */
3957/* Opcode VEX.F3.0F 0xc3 - invalid */
3958/* Opcode VEX.F2.0F 0xc3 - invalid */
3959
3960/* Opcode VEX.0F 0xc4 - invalid */
3961/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3962FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3963/* Opcode VEX.F3.0F 0xc4 - invalid */
3964/* Opcode VEX.F2.0F 0xc4 - invalid */
3965
3966/* Opcode VEX.0F 0xc5 - invalid */
3967/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3968FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3969/* Opcode VEX.F3.0F 0xc5 - invalid */
3970/* Opcode VEX.F2.0F 0xc5 - invalid */
3971
3972/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3973FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3974/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3975FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3976/* Opcode VEX.F3.0F 0xc6 - invalid */
3977/* Opcode VEX.F2.0F 0xc6 - invalid */
3978
3979/* Opcode VEX.0F 0xc7 - invalid */
3980/* Opcode VEX.66.0F 0xc7 - invalid */
3981/* Opcode VEX.F3.0F 0xc7 - invalid */
3982/* Opcode VEX.F2.0F 0xc7 - invalid */
3983
3984/* Opcode VEX.0F 0xc8 - invalid */
3985/* Opcode VEX.0F 0xc9 - invalid */
3986/* Opcode VEX.0F 0xca - invalid */
3987/* Opcode VEX.0F 0xcb - invalid */
3988/* Opcode VEX.0F 0xcc - invalid */
3989/* Opcode VEX.0F 0xcd - invalid */
3990/* Opcode VEX.0F 0xce - invalid */
3991/* Opcode VEX.0F 0xcf - invalid */
3992
3993
3994/* Opcode VEX.0F 0xd0 - invalid */
3995/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3996FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3997/* Opcode VEX.F3.0F 0xd0 - invalid */
3998/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3999FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4000
4001/* Opcode VEX.0F 0xd1 - invalid */
4002/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4003FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4004/* Opcode VEX.F3.0F 0xd1 - invalid */
4005/* Opcode VEX.F2.0F 0xd1 - invalid */
4006
4007/* Opcode VEX.0F 0xd2 - invalid */
4008/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4009FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4010/* Opcode VEX.F3.0F 0xd2 - invalid */
4011/* Opcode VEX.F2.0F 0xd2 - invalid */
4012
4013/* Opcode VEX.0F 0xd3 - invalid */
4014/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4015FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4016/* Opcode VEX.F3.0F 0xd3 - invalid */
4017/* Opcode VEX.F2.0F 0xd3 - invalid */
4018
4019/* Opcode VEX.0F 0xd4 - invalid */
4020
4021
4022/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4023FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4024{
4025 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4026 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4027 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4028}
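/*
 * Editor's sketch (not part of the original source): per 128-bit block the
 * vpaddq worker amounts to two independent modulo-2^64 additions, one per
 * qword lane; there is no saturation or cross-lane carry. Helper name invented.
 */
static void sketchPAddQU128(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    puDst->au64[0] = puSrc1->au64[0] + puSrc2->au64[0]; /* lane 0 wraps silently */
    puDst->au64[1] = puSrc1->au64[1] + puSrc2->au64[1]; /* lane 1 likewise */
}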
4029
4030
4031/* Opcode VEX.F3.0F 0xd4 - invalid */
4032/* Opcode VEX.F2.0F 0xd4 - invalid */
4033
4034/* Opcode VEX.0F 0xd5 - invalid */
4035
4036
4037/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4038FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4039{
4040 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4041 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4042 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4043}
4044
4045
4046/* Opcode VEX.F3.0F 0xd5 - invalid */
4047/* Opcode VEX.F2.0F 0xd5 - invalid */
4048
4049/* Opcode VEX.0F 0xd6 - invalid */
4050
4051/**
4052 * @opcode 0xd6
4053 * @oppfx 0x66
4054 * @opcpuid avx
4055 * @opgroup og_avx_pcksclr_datamove
4056 * @opxcpttype none
4057 * @optest op1=-1 op2=2 -> op1=2
4058 * @optest op1=0 op2=-42 -> op1=-42
4059 */
4060FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4061{
4062 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4064 if (IEM_IS_MODRM_REG_MODE(bRm))
4065 {
4066 /*
4067 * Register, register.
4068 */
4069 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4070 IEM_MC_BEGIN(0, 0);
4071
4072 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4073 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4074
4075 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4076 IEM_GET_MODRM_REG(pVCpu, bRm));
4077 IEM_MC_ADVANCE_RIP();
4078 IEM_MC_END();
4079 }
4080 else
4081 {
4082 /*
4083 * Memory, register.
4084 */
4085 IEM_MC_BEGIN(0, 2);
4086 IEM_MC_LOCAL(uint64_t, uSrc);
4087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4088
4089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4090 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4091 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4092 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4093
4094 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4096
4097 IEM_MC_ADVANCE_RIP();
4098 IEM_MC_END();
4099 }
4100 return VINF_SUCCESS;
4101}
4102
4103/* Opcode VEX.F3.0F 0xd6 - invalid */
4104/* Opcode VEX.F2.0F 0xd6 - invalid */
4105
4106
4107/* Opcode VEX.0F 0xd7 - invalid */
4108
4109/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4110FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4111{
4112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4113 /* Docs say register only. */
4114 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4115 {
4116 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4117 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4118 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4119 if (pVCpu->iem.s.uVexLength)
4120 {
4121 IEM_MC_BEGIN(2, 1);
4122 IEM_MC_ARG(uint64_t *, puDst, 0);
4123 IEM_MC_LOCAL(RTUINT256U, uSrc);
4124 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4125 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4126 IEM_MC_PREPARE_AVX_USAGE();
4127 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4128 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4129 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4130 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4131 IEM_MC_ADVANCE_RIP();
4132 IEM_MC_END();
4133 }
4134 else
4135 {
4136 IEM_MC_BEGIN(2, 0);
4137 IEM_MC_ARG(uint64_t *, puDst, 0);
4138 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4139 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4140 IEM_MC_PREPARE_AVX_USAGE();
4141 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4142 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4143 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 }
4147 return VINF_SUCCESS;
4148 }
4149 return IEMOP_RAISE_INVALID_OPCODE();
4150}
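/*
 * Editor's sketch (not part of the original source): reference semantics of
 * the 128-bit pmovmskb worker invoked above - gather the sign bit of each of
 * the 16 source bytes into the low 16 bits of the destination and zero the
 * rest. Helper name invented for illustration.
 */
static uint64_t sketchPMovMskBU128(PCRTUINT128U puSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte; /* bit 7 is the sign bit */
    return fMask;
}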
4151
4152
4153/* Opcode VEX.F3.0F 0xd7 - invalid */
4154/* Opcode VEX.F2.0F 0xd7 - invalid */
4155
4156
4157/* Opcode VEX.0F 0xd8 - invalid */
4158/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4159FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4160/* Opcode VEX.F3.0F 0xd8 - invalid */
4161/* Opcode VEX.F2.0F 0xd8 - invalid */
4162
4163/* Opcode VEX.0F 0xd9 - invalid */
4164/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4165FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4166/* Opcode VEX.F3.0F 0xd9 - invalid */
4167/* Opcode VEX.F2.0F 0xd9 - invalid */
4168
4169/* Opcode VEX.0F 0xda - invalid */
4170
4171
4172/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4173FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4174{
4175 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4176 IEMOPMEDIAF3_INIT_VARS(vpminub);
4177 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4178}
4179
4180
4181/* Opcode VEX.F3.0F 0xda - invalid */
4182/* Opcode VEX.F2.0F 0xda - invalid */
4183
4184/* Opcode VEX.0F 0xdb - invalid */
4185
4186
4187/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4188FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4189{
4190 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4191 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4192 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4193}
4194
4195
4196/* Opcode VEX.F3.0F 0xdb - invalid */
4197/* Opcode VEX.F2.0F 0xdb - invalid */
4198
4199/* Opcode VEX.0F 0xdc - invalid */
4200/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4201FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4202/* Opcode VEX.F3.0F 0xdc - invalid */
4203/* Opcode VEX.F2.0F 0xdc - invalid */
4204
4205/* Opcode VEX.0F 0xdd - invalid */
4206/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4207FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4208/* Opcode VEX.F3.0F 0xdd - invalid */
4209/* Opcode VEX.F2.0F 0xdd - invalid */
4210
4211/* Opcode VEX.0F 0xde - invalid */
4212
4213
4214/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4215FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4216{
4217 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4218 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4219 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4220}
4221
4222
4223/* Opcode VEX.F3.0F 0xde - invalid */
4224/* Opcode VEX.F2.0F 0xde - invalid */
4225
4226/* Opcode VEX.0F 0xdf - invalid */
4227
4228
4229/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4230FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4231{
4232 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4233 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4234 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4235}
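/*
 * Editor's note (illustrative): vpandn is the odd one out in this logic group
 * because it is not commutative - it complements the first source (VEX.vvvv),
 * not the second: dst = ~src1 & src2. Per-qword sketch, helper name invented:
 */
static void sketchPAndNU128(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    puDst->au64[0] = ~puSrc1->au64[0] & puSrc2->au64[0];
    puDst->au64[1] = ~puSrc1->au64[1] & puSrc2->au64[1];
}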
4236
4237
4238/* Opcode VEX.F3.0F 0xdf - invalid */
4239/* Opcode VEX.F2.0F 0xdf - invalid */
4240
4241/* Opcode VEX.0F 0xe0 - invalid */
4242
4243
4244/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4245FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4246{
4247 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4248 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4249 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4250}
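/*
 * Editor's sketch (not part of the original source): vpavgb's per-byte rounding
 * average, (a + b + 1) >> 1, computed in a widened type so the carry out of
 * 8 bits is not lost. Helper name invented for illustration.
 */
static uint8_t sketchPAvgB(uint8_t uByte1, uint8_t uByte2)
{
    return (uint8_t)(((unsigned)uByte1 + uByte2 + 1) >> 1); /* halves round upwards */
}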
4251
4252
4253/* Opcode VEX.F3.0F 0xe0 - invalid */
4254/* Opcode VEX.F2.0F 0xe0 - invalid */
4255
4256/* Opcode VEX.0F 0xe1 - invalid */
4257/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4258FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4259/* Opcode VEX.F3.0F 0xe1 - invalid */
4260/* Opcode VEX.F2.0F 0xe1 - invalid */
4261
4262/* Opcode VEX.0F 0xe2 - invalid */
4263/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4264FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4265/* Opcode VEX.F3.0F 0xe2 - invalid */
4266/* Opcode VEX.F2.0F 0xe2 - invalid */
4267
4268/* Opcode VEX.0F 0xe3 - invalid */
4269
4270
4271/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4272FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4273{
4274 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4275 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4277}
4278
4279
4280/* Opcode VEX.F3.0F 0xe3 - invalid */
4281/* Opcode VEX.F2.0F 0xe3 - invalid */
4282
4283/* Opcode VEX.0F 0xe4 - invalid */
4284
4285
4286/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4287FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4288{
4289 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4290 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4291 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4292}
4293
4294
4295/* Opcode VEX.F3.0F 0xe4 - invalid */
4296/* Opcode VEX.F2.0F 0xe4 - invalid */
4297
4298/* Opcode VEX.0F 0xe5 - invalid */
4299
4300
4301/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4302FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4303{
4304 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4305 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4306 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4307}
4308
4309
4310/* Opcode VEX.F3.0F 0xe5 - invalid */
4311/* Opcode VEX.F2.0F 0xe5 - invalid */
4312
4313/* Opcode VEX.0F 0xe6 - invalid */
4314/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4315FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4316/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4317FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4318/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4319FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4320
4321
4322/* Opcode VEX.0F 0xe7 - invalid */
4323
4324/**
4325 * @opcode 0xe7
4326 * @opcodesub !11 mr/reg
4327 * @oppfx 0x66
4328 * @opcpuid avx
4329 * @opgroup og_avx_cachect
4330 * @opxcpttype 1
4331 * @optest op1=-1 op2=2 -> op1=2
4332 * @optest op1=0 op2=-42 -> op1=-42
4333 */
4334FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4335{
4336 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4337 Assert(pVCpu->iem.s.uVexLength <= 1);
4338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4339 if (IEM_IS_MODRM_MEM_MODE(bRm))
4340 {
4341 if (pVCpu->iem.s.uVexLength == 0)
4342 {
4343 /*
4344 * 128-bit: Memory, register.
4345 */
4346 IEM_MC_BEGIN(0, 2);
4347 IEM_MC_LOCAL(RTUINT128U, uSrc);
4348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4349
4350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4351 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4353 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4354
4355 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4356 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4357
4358 IEM_MC_ADVANCE_RIP();
4359 IEM_MC_END();
4360 }
4361 else
4362 {
4363 /*
4364 * 256-bit: Memory, register.
4365 */
4366 IEM_MC_BEGIN(0, 2);
4367 IEM_MC_LOCAL(RTUINT256U, uSrc);
4368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4369
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4371 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4374
4375 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4376 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4377
4378 IEM_MC_ADVANCE_RIP();
4379 IEM_MC_END();
4380 }
4381 return VINF_SUCCESS;
4382 }
4383 /**
4384 * @opdone
4385 * @opmnemonic udvex660fe7reg
4386 * @opcode 0xe7
4387 * @opcodesub 11 mr/reg
4388 * @oppfx 0x66
4389 * @opunused immediate
4390 * @opcpuid avx
4391 * @optest ->
4392 */
4393 return IEMOP_RAISE_INVALID_OPCODE();
4394}
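/*
 * Editor's note (illustrative, not from the original file): the non-temporal
 * hint of vmovntdq only affects caching; architecturally it behaves like an
 * aligned store, which is presumably why the same _ALIGN_SSE/_ALIGN_AVX
 * helpers as vmovdqa are reused above.
 */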
4395
4396/* Opcode VEX.F3.0F 0xe7 - invalid */
4397/* Opcode VEX.F2.0F 0xe7 - invalid */
4398
4399
4400/* Opcode VEX.0F 0xe8 - invalid */
4401/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
4402FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
4403/* Opcode VEX.F3.0F 0xe8 - invalid */
4404/* Opcode VEX.F2.0F 0xe8 - invalid */
4405
4406/* Opcode VEX.0F 0xe9 - invalid */
4407/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
4408FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
4409/* Opcode VEX.F3.0F 0xe9 - invalid */
4410/* Opcode VEX.F2.0F 0xe9 - invalid */
4411
4412/* Opcode VEX.0F 0xea - invalid */
4413
4414
4415/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
4416FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
4417{
4418 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4419 IEMOPMEDIAF3_INIT_VARS(vpminsw);
4420 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4421}
4422
4423
4424/* Opcode VEX.F3.0F 0xea - invalid */
4425/* Opcode VEX.F2.0F 0xea - invalid */
4426
4427/* Opcode VEX.0F 0xeb - invalid */
4428
4429
4430/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
4431FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
4432{
4433 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4434 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4435 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
4436}
4437
4438
4439
4440/* Opcode VEX.F3.0F 0xeb - invalid */
4441/* Opcode VEX.F2.0F 0xeb - invalid */
4442
4443/* Opcode VEX.0F 0xec - invalid */
4444/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
4445FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
4446/* Opcode VEX.F3.0F 0xec - invalid */
4447/* Opcode VEX.F2.0F 0xec - invalid */
4448
4449/* Opcode VEX.0F 0xed - invalid */
4450/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
4451FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
4452/* Opcode VEX.F3.0F 0xed - invalid */
4453/* Opcode VEX.F2.0F 0xed - invalid */
4454
4455/* Opcode VEX.0F 0xee - invalid */
4456
4457
4458/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
4459FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
4460{
4461 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4462 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
4463 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4464}
4465
4466
4467/* Opcode VEX.F3.0F 0xee - invalid */
4468/* Opcode VEX.F2.0F 0xee - invalid */
4469
4470
4471/* Opcode VEX.0F 0xef - invalid */
4472
4473
4474/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
4475FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
4476{
4477 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4478 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4479 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
4480}
4481
4482
4483/* Opcode VEX.F3.0F 0xef - invalid */
4484/* Opcode VEX.F2.0F 0xef - invalid */
4485
4486/* Opcode VEX.0F 0xf0 - invalid */
4487/* Opcode VEX.66.0F 0xf0 - invalid */
4488/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
4489FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
4490
4491/* Opcode VEX.0F 0xf1 - invalid */
4492/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
4493FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
4494/* Opcode VEX.F2.0F 0xf1 - invalid */
4495
4496/* Opcode VEX.0F 0xf2 - invalid */
4497/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
4498FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
4499/* Opcode VEX.F2.0F 0xf2 - invalid */
4500
4501/* Opcode VEX.0F 0xf3 - invalid */
4502/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
4503FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
4504/* Opcode VEX.F2.0F 0xf3 - invalid */
4505
4506/* Opcode VEX.0F 0xf4 - invalid */
4507
4508
4509/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
4510FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
4511{
4512 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4513 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
4514 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4515}
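/*
 * Editor's sketch (not part of the original source): vpmuludq multiplies only
 * the low dword of each qword lane and keeps the full 64-bit product, i.e.
 * dword elements 0 and 2 of each 128-bit block. Helper name invented.
 */
static void sketchPMulUDqU128(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    puDst->au64[0] = (uint64_t)puSrc1->au32[0] * puSrc2->au32[0]; /* lane 0 */
    puDst->au64[1] = (uint64_t)puSrc1->au32[2] * puSrc2->au32[2]; /* lane 1 */
}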
4516
4517
4518/* Opcode VEX.F2.0F 0xf4 - invalid */
4519
4520/* Opcode VEX.0F 0xf5 - invalid */
4521/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
4522FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
4523/* Opcode VEX.F2.0F 0xf5 - invalid */
4524
4525/* Opcode VEX.0F 0xf6 - invalid */
4526
4527
4528/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
4529FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
4530{
4531 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4532 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
4533 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4534}
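/*
 * Editor's sketch (not part of the original source): vpsadbw sums the absolute
 * byte differences of each 8-byte group into a 16-bit value stored in the low
 * word of the corresponding destination qword (upper 48 bits zero). Helper
 * name invented for illustration.
 */
static uint64_t sketchPSadBwQword(const uint8_t *pabSrc1, const uint8_t *pabSrc2)
{
    unsigned uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pabSrc1[i] >= pabSrc2[i] ? pabSrc1[i] - pabSrc2[i] : pabSrc2[i] - pabSrc1[i];
    return uSum; /* at most 8 * 255 = 2040, so it always fits in 16 bits */
}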
4535
4536
4537/* Opcode VEX.F2.0F 0xf6 - invalid */
4538
4539/* Opcode VEX.0F 0xf7 - invalid */
4540/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
4541FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
4542/* Opcode VEX.F2.0F 0xf7 - invalid */
4543
4544/* Opcode VEX.0F 0xf8 - invalid */
4545
4546
4547/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
4548FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
4549{
4550 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4551 IEMOPMEDIAF3_INIT_VARS( vpsubb);
4552 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4553}
4554
4555
4556/* Opcode VEX.F2.0F 0xf8 - invalid */
4557
4558/* Opcode VEX.0F 0xf9 - invalid */
4559
4560
4561/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
4562FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
4563{
4564 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4565 IEMOPMEDIAF3_INIT_VARS( vpsubw);
4566 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4567}
4568
4569
4570/* Opcode VEX.F2.0F 0xf9 - invalid */
4571
4572/* Opcode VEX.0F 0xfa - invalid */
4573
4574
4575/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
4576FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
4577{
4578 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4579 IEMOPMEDIAF3_INIT_VARS( vpsubd);
4580 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4581}
4582
4583
4584/* Opcode VEX.F2.0F 0xfa - invalid */
4585
4586/* Opcode VEX.0F 0xfb - invalid */
4587
4588
4589/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
4590FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
4591{
4592 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4593 IEMOPMEDIAF3_INIT_VARS( vpsubq);
4594 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4595}
4596
4597
4598/* Opcode VEX.F2.0F 0xfb - invalid */
4599
4600/* Opcode VEX.0F 0xfc - invalid */
4601
4602
4603/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
4604FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
4605{
4606 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4607 IEMOPMEDIAF3_INIT_VARS( vpaddb);
4608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4609}
4610
4611
4612/* Opcode VEX.F2.0F 0xfc - invalid */
4613
4614/* Opcode VEX.0F 0xfd - invalid */
4615
4616
4617/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
4618FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
4619{
4620 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4621 IEMOPMEDIAF3_INIT_VARS( vpaddw);
4622 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4623}
4624
4625
4626/* Opcode VEX.F2.0F 0xfd - invalid */
4627
4628/* Opcode VEX.0F 0xfe - invalid */
4629
4630
4631/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
4632FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
4633{
4634 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4635 IEMOPMEDIAF3_INIT_VARS( vpaddd);
4636 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4637}
4638
4639
4640/* Opcode VEX.F2.0F 0xfe - invalid */
4641
4642
4643/** Opcode **** 0x0f 0xff - UD0 */
4644FNIEMOP_DEF(iemOp_vud0)
4645{
4646 IEMOP_MNEMONIC(vud0, "vud0");
4647 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
4648 {
4649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
4650#ifndef TST_IEM_CHECK_MC
4651 RTGCPTR GCPtrEff;
4652 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
4653 if (rcStrict != VINF_SUCCESS)
4654 return rcStrict;
4655#endif
4656 IEMOP_HLP_DONE_DECODING();
4657 }
4658 return IEMOP_RAISE_INVALID_OPCODE();
4659}
4660
4661
4662
4663/**
4664 * VEX opcode map \#1.
4665 *
4666 * @sa g_apfnTwoByteMap
4667 */
4668IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
4669{
4670 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
4671 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
4672 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
4673 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
4674 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
4675 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
4676 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
4677 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
4678 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
4679 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
4680 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
4681 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
4682 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
4683 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
4684 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
4685 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
4686 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
4687
4688 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
4689 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
4690 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
4691 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4692 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4693 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4694 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
4695 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4696 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
4697 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
4698 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
4699 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
4700 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
4701 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
4702 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
4703 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
4704
4705 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
4706 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
4707 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
4708 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
4709 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
4710 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
4711 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
4712 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
4713 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4714 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4715 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
4716 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4717 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
4718 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
4719 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4720 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4721
4722 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
4723 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
4724 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
4725 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
4726 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
4727 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
4728 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
4729 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
4730 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4731 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4732 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4733 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4734 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4735 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4736 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4737 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4738
4739 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
4740 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
4741 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
4742 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
4743 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
4744 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
4745 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
4746 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
4747 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
4748 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
4749 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
4750 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
4751 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
4752 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
4753 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
4754 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
4755
4756 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4757 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
4758 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4759 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4760 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4761 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4762 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4763 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4764 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
4765 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
4766 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
4767 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
4768 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
4769 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
4770 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
4771 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
4772
4773 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4774 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4775 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4776 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4777 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4778 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4779 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4780 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4781 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4782 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4783 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4784 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4785 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4786 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4787 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4788 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
4789
4790 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
4791 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4792 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4793 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4794 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4795 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4796 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4797 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4798 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
4799 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
4800 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
4801 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
4802 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
4803 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
4804 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
4805 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
4806
4807 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
4808 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
4809 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
4810 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
4811 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
4812 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
4813 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
4814 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
4815 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
4816 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4817 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4818 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4819 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4820 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4821 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4822 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4823
4824 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4825 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4826 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4827 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4828 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4829 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4830 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4831 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4832 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4833 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4834 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4835 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4836 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4837 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4838 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4839 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4840
4841 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4842 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4843 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4844 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4845 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4846 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4847 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4848 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4849 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4850 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4851 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4852 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4853 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4854 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4855 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4856 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4857
4858 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4859 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4860 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4861 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4862 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4863 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4864 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4865 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4866 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4867 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4868 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4869 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4870 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4871 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4872 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4873 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4874
4875 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4876 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4877 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4878 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4879 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4880 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4881 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4882 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4883 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4884 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4885 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4886 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4887 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4888 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4889 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4890 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4891
4892 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4893 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4894 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4895 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4896 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4897 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4898 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4899 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4900 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4901 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4902 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4903 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4904 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4905 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4906 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4907 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4908
4909 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4910 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4911 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4912 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4913 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4914 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4915 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4916 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4917 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4918 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4919 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4920 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4921 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4922 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4923 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4924 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4925
4926 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4927 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4928 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4929 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4930 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4931 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4932 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4933 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4934 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4935 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4936 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4937 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4938 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4939 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4940 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4941 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4942};
4943AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
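/*
 * Editor's note (assumption, mirroring the group 15 dispatch earlier in this
 * file): with four entries per opcode the decoder presumably selects
 *     g_apfnVexMap1[bOpcode * 4 + pVCpu->iem.s.idxPrefix]
 * which is what the 256 * 4 == 1024 AssertCompile above guards against drift.
 */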
4944/** @} */
4945