VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@96748

Last change on this file since 96748 was 96748, checked in by vboxsync, 3 years ago

VMM/IEM: Implement [v]comiss/[v]ucomiss/[v]comisd/[v]ucomisd instructions, bugref:9898

/* $Id: IEMAllInstructionsVexMap1.cpp.h 96748 2022-09-15 17:32:44Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *    - vpxxx    xmm0, xmm1, xmm2/mem128
 *    - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(4, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(4, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
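

/*
 * Usage sketch (hypothetical handler): an opcode handler binds an
 * instruction to the worker above by setting up a function table and
 * forwarding, along the lines of the vunpcklps handler for opcode 0x14
 * later in this file.  IEMOPMEDIAF3_INIT_VARS is assumed here to build
 * the s_Host/s_Fallback IEMOPMEDIAF3 tables the same way
 * IEMOPMEDIAOPTF3_INIT_VARS does for the _Opt worker family:
 *
 *     FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VPXXX, vpxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *         IEMOPMEDIAF3_INIT_VARS(vpxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */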


/**
 * Common worker for AVX2 instructions on the forms:
 *    - vpxxx    xmm0, xmm1, xmm2/mem128
 *    - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
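

/*
 * Note: unlike iemOpCommonAvxAvx2_Vx_Hx_Wx above, this variant dispatches
 * through IEM_MC_CALL_VOID_AIMPL_3 because its function table entries take
 * no implicit FPU/MXCSR state argument.  The vunpcklps/vunpckhps family of
 * handlers for opcodes 0x14 and 0x15 below reaches it through the
 * _LowSrc/_HighSrc wrappers.
 */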


/**
 * Common worker for AVX2 instructions on the forms:
 *    - vpunpckhxx    xmm0, xmm1, xmm2/mem128
 *    - vpunpckhxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *    - vpunpcklxx    xmm0, xmm1, xmm2/mem128
 *    - vpunpcklxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *    - vpxxx    xmm0, xmm1/mem128
 *    - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
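

/*
 * Usage sketch (hypothetical handler) for the two-operand worker above,
 * mirroring the three-operand pattern of the opcode 0x14/0x15 handlers
 * below; IEMOPMEDIAOPTF2_INIT_VARS is assumed here to build the
 * s_Host/s_Fallback IEMOPMEDIAOPTF2 tables:
 *
 *     FNIEMOP_DEF(iemOp_vpxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(VEX_RM, VPXXX, vpxxx, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         IEMOPMEDIAOPTF2_INIT_VARS(vpxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */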


/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, Memory
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, Memory
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
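

/*
 * Note on the ZX_VLMAX operations used by the mov-family handlers: a
 * VEX-encoded 128-bit write zeroes bits 255:128 of the destination YMM
 * register (and anything above, up to VLMAX) instead of preserving them,
 * which is why the 128-bit paths use IEM_MC_COPY/STORE_YREG_..._ZX_VLMAX
 * rather than plain 128-bit register writes.
 */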


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, Memory
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, Memory
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
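

/*
 * Note: opcode 0x12 with no prefix encodes two mnemonics, which is why the
 * handler above carries both names: the register form (mod=11) is VMOVHLPS
 * and the memory form is VMOVLPS.
 */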


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/* Opcode VEX.F2.0F 0x16 - invalid */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

1786/**
1787 * @opcode 0x28
1788 * @oppfx none
1789 * @opcpuid avx
1790 * @opgroup og_avx_pcksclr_datamove
1791 * @opxcpttype 1
1792 * @optest op1=1 op2=2 -> op1=2
1793 * @optest op1=0 op2=-42 -> op1=-42
1794 * @note Almost identical to vmovapd.
1795 */
1796FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1797{
1798 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 Assert(pVCpu->iem.s.uVexLength <= 1);
1801 if (IEM_IS_MODRM_REG_MODE(bRm))
1802 {
1803 /*
1804 * Register, register.
1805 */
1806 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1807 IEM_MC_BEGIN(1, 0);
1808
1809 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1810 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1811 if (pVCpu->iem.s.uVexLength == 0)
1812 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1813 IEM_GET_MODRM_RM(pVCpu, bRm));
1814 else
1815 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1816 IEM_GET_MODRM_RM(pVCpu, bRm));
1817 IEM_MC_ADVANCE_RIP();
1818 IEM_MC_END();
1819 }
1820 else
1821 {
1822 /*
1823 * Register, memory.
1824 */
1825 if (pVCpu->iem.s.uVexLength == 0)
1826 {
1827 IEM_MC_BEGIN(0, 2);
1828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1829 IEM_MC_LOCAL(RTUINT128U, uSrc);
1830
1831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1832 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1833 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1834 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1835
1836 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1837 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 IEM_MC_BEGIN(0, 2);
1845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1852
1853 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1854 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1855
1856 IEM_MC_ADVANCE_RIP();
1857 IEM_MC_END();
1858 }
1859 }
1860 return VINF_SUCCESS;
1861}
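
/*
 * The _ALIGN_SSE/_ALIGN_AVX fetchers above enforce the natural operand
 * alignment of vmovaps. A minimal sketch of that check, assuming #GP(0) on
 * failure as for other aligned SSE/AVX moves (illustrative helper, not the
 * actual IEM code path):
 *
 *     static bool vmovapsIsAlignedRef(RTGCPTR GCPtrEff, uint32_t cbOperand)
 *     {
 *         return (GCPtrEff & (cbOperand - 1)) == 0;   // cbOperand = 16 or 32
 *     }
 */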
1862
1863
1864/**
1865 * @opcode 0x28
1866 * @oppfx 66
1867 * @opcpuid avx
1868 * @opgroup og_avx_pcksclr_datamove
1869 * @opxcpttype 1
1870 * @optest op1=1 op2=2 -> op1=2
1871 * @optest op1=0 op2=-42 -> op1=-42
1872 * @note Almost identical to vmovaps
1873 */
1874FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1875{
1876 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 Assert(pVCpu->iem.s.uVexLength <= 1);
1879 if (IEM_IS_MODRM_REG_MODE(bRm))
1880 {
1881 /*
1882 * Register, register.
1883 */
1884 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1885 IEM_MC_BEGIN(1, 0);
1886
1887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1888 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1889 if (pVCpu->iem.s.uVexLength == 0)
1890 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1891 IEM_GET_MODRM_RM(pVCpu, bRm));
1892 else
1893 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1894 IEM_GET_MODRM_RM(pVCpu, bRm));
1895 IEM_MC_ADVANCE_RIP();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 /*
1901 * Register, memory.
1902 */
1903 if (pVCpu->iem.s.uVexLength == 0)
1904 {
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1907 IEM_MC_LOCAL(RTUINT128U, uSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1911 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1912 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1913
1914 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1916
1917 IEM_MC_ADVANCE_RIP();
1918 IEM_MC_END();
1919 }
1920 else
1921 {
1922 IEM_MC_BEGIN(0, 2);
1923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1924 IEM_MC_LOCAL(RTUINT256U, uSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1928 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1929 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1930
1931 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1932 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1933
1934 IEM_MC_ADVANCE_RIP();
1935 IEM_MC_END();
1936 }
1937 }
1938 return VINF_SUCCESS;
1939}
1940
1941/**
1942 * @opmnemonic udvexf30f28
1943 * @opcode 0x28
1944 * @oppfx 0xf3
1945 * @opunused vex.modrm
1946 * @opcpuid avx
1947 * @optest ->
1948 * @opdone
1949 */
1950
1951/**
1952 * @opmnemonic udvexf20f28
1953 * @opcode 0x28
1954 * @oppfx 0xf2
1955 * @opunused vex.modrm
1956 * @opcpuid avx
1957 * @optest ->
1958 * @opdone
1959 */
1960
1961/**
1962 * @opcode 0x29
1963 * @oppfx none
1964 * @opcpuid avx
1965 * @opgroup og_avx_pcksclr_datamove
1966 * @opxcpttype 1
1967 * @optest op1=1 op2=2 -> op1=2
1968 * @optest op1=0 op2=-42 -> op1=-42
1969 * @note Almost identical to vmovapd.
1970 */
1971FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1972{
1973 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 Assert(pVCpu->iem.s.uVexLength <= 1);
1976 if (IEM_IS_MODRM_REG_MODE(bRm))
1977 {
1978 /*
1979 * Register, register.
1980 */
1981 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1982 IEM_MC_BEGIN(1, 0);
1983
1984 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1986 if (pVCpu->iem.s.uVexLength == 0)
1987 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1988 IEM_GET_MODRM_REG(pVCpu, bRm));
1989 else
1990 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1991 IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_ADVANCE_RIP();
1993 IEM_MC_END();
1994 }
1995 else
1996 {
1997 /*
1998 * Register, memory.
1999 */
2000 if (pVCpu->iem.s.uVexLength == 0)
2001 {
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004 IEM_MC_LOCAL(RTUINT128U, uSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2010
2011 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2012 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP();
2015 IEM_MC_END();
2016 }
2017 else
2018 {
2019 IEM_MC_BEGIN(0, 2);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_LOCAL(RTUINT256U, uSrc);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2027
2028 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2029 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2030
2031 IEM_MC_ADVANCE_RIP();
2032 IEM_MC_END();
2033 }
2034 }
2035 return VINF_SUCCESS;
2036}
2037
2038/**
2039 * @opcode 0x29
2040 * @oppfx 66
2041 * @opcpuid avx
2042 * @opgroup og_avx_pcksclr_datamove
2043 * @opxcpttype 1
2044 * @optest op1=1 op2=2 -> op1=2
2045 * @optest op1=0 op2=-42 -> op1=-42
2046 * @note Almost identical to vmovaps
2047 */
2048FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2049{
2050 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2051 Assert(pVCpu->iem.s.uVexLength <= 1);
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 if (IEM_IS_MODRM_REG_MODE(bRm))
2054 {
2055 /*
2056 * Register, register.
2057 */
2058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2059 IEM_MC_BEGIN(1, 0);
2060
2061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2063 if (pVCpu->iem.s.uVexLength == 0)
2064 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2065 IEM_GET_MODRM_REG(pVCpu, bRm));
2066 else
2067 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2068 IEM_GET_MODRM_REG(pVCpu, bRm));
2069 IEM_MC_ADVANCE_RIP();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 /*
2075 * Register, memory.
2076 */
2077 if (pVCpu->iem.s.uVexLength == 0)
2078 {
2079 IEM_MC_BEGIN(0, 2);
2080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2081 IEM_MC_LOCAL(RTUINT128U, uSrc);
2082
2083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2084 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2085 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2086 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2087
2088 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2089 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2090
2091 IEM_MC_ADVANCE_RIP();
2092 IEM_MC_END();
2093 }
2094 else
2095 {
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT256U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2106 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 }
2112 return VINF_SUCCESS;
2113}
2114
2115
2116/**
2117 * @opmnemonic udvexf30f29
2118 * @opcode 0x29
2119 * @oppfx 0xf3
2120 * @opunused vex.modrm
2121 * @opcpuid avx
2122 * @optest ->
2123 * @opdone
2124 */
2125
2126/**
2127 * @opmnemonic udvexf20f29
2128 * @opcode 0x29
2129 * @oppfx 0xf2
2130 * @opunused vex.modrm
2131 * @opcpuid avx
2132 * @optest ->
2133 * @opdone
2134 */
2135
2136
2137/** Opcode VEX.0F 0x2a - invalid */
2138/** Opcode VEX.66.0F 0x2a - invalid */
2139/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2140FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2141/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2142FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2143
2144
2145/**
2146 * @opcode 0x2b
2147 * @opcodesub !11 mr/reg
2148 * @oppfx none
2149 * @opcpuid avx
2150 * @opgroup og_avx_cachect
2151 * @opxcpttype 1
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
2154 * @note Identical implementation to vmovntpd
2155 */
2156FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2157{
2158 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2159 Assert(pVCpu->iem.s.uVexLength <= 1);
2160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2161 if (IEM_IS_MODRM_MEM_MODE(bRm))
2162 {
2163 /*
     * Memory, register.
2165 */
2166 if (pVCpu->iem.s.uVexLength == 0)
2167 {
2168 IEM_MC_BEGIN(0, 2);
2169 IEM_MC_LOCAL(RTUINT128U, uSrc);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2173 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2174 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2176
2177 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2178 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2179
2180 IEM_MC_ADVANCE_RIP();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 IEM_MC_BEGIN(0, 2);
2186 IEM_MC_LOCAL(RTUINT256U, uSrc);
2187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2188
2189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2193
2194 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2195 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2196
2197 IEM_MC_ADVANCE_RIP();
2198 IEM_MC_END();
2199 }
2200 }
2201 /* The register, register encoding is invalid. */
2202 else
2203 return IEMOP_RAISE_INVALID_OPCODE();
2204 return VINF_SUCCESS;
2205}
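
/*
 * Non-temporal stores are a cache usage hint only; guest code typically
 * ends a streaming sequence with a store fence. An illustrative usage
 * pattern (not something emulated here):
 *
 *     vmovntps [rdi], ymm0
 *     vmovntps [rdi + 32], ymm1
 *     sfence                     ; make the streaming stores globally visible
 */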
2206
2207/**
2208 * @opcode 0x2b
2209 * @opcodesub !11 mr/reg
2210 * @oppfx 0x66
2211 * @opcpuid avx
2212 * @opgroup og_avx_cachect
2213 * @opxcpttype 1
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
2216 * @note Identical implementation to vmovntps
2217 */
2218FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2219{
2220 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2223 if (IEM_IS_MODRM_MEM_MODE(bRm))
2224 {
2225 /*
     * Memory, register.
2227 */
2228 if (pVCpu->iem.s.uVexLength == 0)
2229 {
2230 IEM_MC_BEGIN(0, 2);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc);
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2236 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2240 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP();
2243 IEM_MC_END();
2244 }
2245 else
2246 {
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(RTUINT256U, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2255
2256 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2257 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2258
2259 IEM_MC_ADVANCE_RIP();
2260 IEM_MC_END();
2261 }
2262 }
2263 /* The register, register encoding is invalid. */
2264 else
2265 return IEMOP_RAISE_INVALID_OPCODE();
2266 return VINF_SUCCESS;
2267}
2268
2269/**
2270 * @opmnemonic udvexf30f2b
2271 * @opcode 0x2b
2272 * @oppfx 0xf3
2273 * @opunused vex.modrm
2274 * @opcpuid avx
2275 * @optest ->
2276 * @opdone
2277 */
2278
2279/**
2280 * @opmnemonic udvexf20f2b
2281 * @opcode 0x2b
2282 * @oppfx 0xf2
2283 * @opunused vex.modrm
2284 * @opcpuid avx
2285 * @optest ->
2286 * @opdone
2287 */
2288
2289
2290/* Opcode VEX.0F 0x2c - invalid */
2291/* Opcode VEX.66.0F 0x2c - invalid */
2292/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2293FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2294/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2295FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2296
2297/* Opcode VEX.0F 0x2d - invalid */
2298/* Opcode VEX.66.0F 0x2d - invalid */
2299/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2300FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2301/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2302FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2303
2304
2305/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2306FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2307{
2308 IEMOP_MNEMONIC2(RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if (IEM_IS_MODRM_REG_MODE(bRm))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2316 IEM_MC_BEGIN(4, 1);
2317 IEM_MC_LOCAL(uint32_t, fEFlags);
2318 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2320 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2321 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2323 IEM_MC_PREPARE_AVX_USAGE();
2324 IEM_MC_FETCH_EFLAGS(fEFlags);
2325 IEM_MC_REF_MXCSR(pfMxcsr);
2326 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2327 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2329 IEM_MC_IF_MXCSR_XCPT_PENDING()
2330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2331 IEM_MC_ELSE()
2332 IEM_MC_STORE_EFLAGS(fEFlags);
2333 IEM_MC_ENDIF();
2334
2335 IEM_MC_ADVANCE_RIP();
2336 IEM_MC_END();
2337 }
2338 else
2339 {
2340 /*
2341 * Register, memory.
2342 */
2343 IEM_MC_BEGIN(4, 3);
2344 IEM_MC_LOCAL(uint32_t, fEFlags);
2345 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2346 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2347 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2348 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2349 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2351
2352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2353 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2354 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2355 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2356
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2359 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2360 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2361 IEM_MC_IF_MXCSR_XCPT_PENDING()
2362 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2363 IEM_MC_ELSE()
2364 IEM_MC_STORE_EFLAGS(fEFlags);
2365 IEM_MC_ENDIF();
2366
2367 IEM_MC_ADVANCE_RIP();
2368 IEM_MC_END();
2369 }
2370 return VINF_SUCCESS;
2371}
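
/*
 * The comparison result lands in ZF/PF/CF with OF/SF/AF cleared. A minimal
 * scalar sketch of the assumed mapping (illustrative helper using
 * isunordered() from math.h, not the iemAImpl worker):
 *
 *     static uint32_t ucomissFlagsRef(float r32Src1, float r32Src2)
 *     {
 *         if (isunordered(r32Src1, r32Src2))
 *             return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;   // unordered
 *         if (r32Src1 > r32Src2)
 *             return 0;                                      // greater
 *         if (r32Src1 < r32Src2)
 *             return X86_EFL_CF;                             // less
 *         return X86_EFL_ZF;                                 // equal
 *     }
 */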
2372
2373
2374/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2375FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2376{
2377 IEMOP_MNEMONIC2(RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2379 if (IEM_IS_MODRM_REG_MODE(bRm))
2380 {
2381 /*
2382 * Register, register.
2383 */
2384 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2385 IEM_MC_BEGIN(4, 1);
2386 IEM_MC_LOCAL(uint32_t, fEFlags);
2387 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2388 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2389 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2390 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2391 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2392 IEM_MC_PREPARE_AVX_USAGE();
2393 IEM_MC_FETCH_EFLAGS(fEFlags);
2394 IEM_MC_REF_MXCSR(pfMxcsr);
2395 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2396 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2397 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2398 IEM_MC_IF_MXCSR_XCPT_PENDING()
2399 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2400 IEM_MC_ELSE()
2401 IEM_MC_STORE_EFLAGS(fEFlags);
2402 IEM_MC_ENDIF();
2403
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Register, memory.
2411 */
2412 IEM_MC_BEGIN(4, 3);
2413 IEM_MC_LOCAL(uint32_t, fEFlags);
2414 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2415 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2416 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2417 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2418 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420
2421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2422 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2423 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2424 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2425
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2429 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2430 IEM_MC_IF_MXCSR_XCPT_PENDING()
2431 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2432 IEM_MC_ELSE()
2433 IEM_MC_STORE_EFLAGS(fEFlags);
2434 IEM_MC_ENDIF();
2435
2436 IEM_MC_ADVANCE_RIP();
2437 IEM_MC_END();
2438 }
2439 return VINF_SUCCESS;
2440}
2441
2442
2443/* Opcode VEX.F3.0F 0x2e - invalid */
2444/* Opcode VEX.F2.0F 0x2e - invalid */
2445
2446/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2447FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2448{
2449 IEMOP_MNEMONIC2(RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2451 if (IEM_IS_MODRM_REG_MODE(bRm))
2452 {
2453 /*
2454 * Register, register.
2455 */
2456 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2457 IEM_MC_BEGIN(4, 1);
2458 IEM_MC_LOCAL(uint32_t, fEFlags);
2459 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2460 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2461 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2462 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2463 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2464 IEM_MC_PREPARE_AVX_USAGE();
2465 IEM_MC_FETCH_EFLAGS(fEFlags);
2466 IEM_MC_REF_MXCSR(pfMxcsr);
2467 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2468 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2469 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vcomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2470 IEM_MC_IF_MXCSR_XCPT_PENDING()
2471 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2472 IEM_MC_ELSE()
2473 IEM_MC_STORE_EFLAGS(fEFlags);
2474 IEM_MC_ENDIF();
2475
2476 IEM_MC_ADVANCE_RIP();
2477 IEM_MC_END();
2478 }
2479 else
2480 {
2481 /*
2482 * Register, memory.
2483 */
2484 IEM_MC_BEGIN(4, 3);
2485 IEM_MC_LOCAL(uint32_t, fEFlags);
2486 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2487 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2488 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2489 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2490 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2492
2493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2494 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2495 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2496 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2497
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2500 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2501 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vcomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2502 IEM_MC_IF_MXCSR_XCPT_PENDING()
2503 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2504 IEM_MC_ELSE()
2505 IEM_MC_STORE_EFLAGS(fEFlags);
2506 IEM_MC_ENDIF();
2507
2508 IEM_MC_ADVANCE_RIP();
2509 IEM_MC_END();
2510 }
2511 return VINF_SUCCESS;
2512}
2513
2514
2515/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2516FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2517{
2518 IEMOP_MNEMONIC2(RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2520 if (IEM_IS_MODRM_REG_MODE(bRm))
2521 {
2522 /*
2523 * Register, register.
2524 */
2525 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2526 IEM_MC_BEGIN(4, 1);
2527 IEM_MC_LOCAL(uint32_t, fEFlags);
2528 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2529 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2530 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2531 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2532 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2533 IEM_MC_PREPARE_AVX_USAGE();
2534 IEM_MC_FETCH_EFLAGS(fEFlags);
2535 IEM_MC_REF_MXCSR(pfMxcsr);
2536 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2538 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vcomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2539 IEM_MC_IF_MXCSR_XCPT_PENDING()
2540 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2541 IEM_MC_ELSE()
2542 IEM_MC_STORE_EFLAGS(fEFlags);
2543 IEM_MC_ENDIF();
2544
2545 IEM_MC_ADVANCE_RIP();
2546 IEM_MC_END();
2547 }
2548 else
2549 {
2550 /*
2551 * Register, memory.
2552 */
2553 IEM_MC_BEGIN(4, 3);
2554 IEM_MC_LOCAL(uint32_t, fEFlags);
2555 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2556 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2557 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2558 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2559 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2564 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2565 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2566
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2569 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2570 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_vcomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
2571 IEM_MC_IF_MXCSR_XCPT_PENDING()
2572 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2573 IEM_MC_ELSE()
2574 IEM_MC_STORE_EFLAGS(fEFlags);
2575 IEM_MC_ENDIF();
2576
2577 IEM_MC_ADVANCE_RIP();
2578 IEM_MC_END();
2579 }
2580 return VINF_SUCCESS;
2581}
2582
2583
2584/* Opcode VEX.F3.0F 0x2f - invalid */
2585/* Opcode VEX.F2.0F 0x2f - invalid */
2586
2587/* Opcode VEX.0F 0x30 - invalid */
2588/* Opcode VEX.0F 0x31 - invalid */
2589/* Opcode VEX.0F 0x32 - invalid */
2590/* Opcode VEX.0F 0x33 - invalid */
2591/* Opcode VEX.0F 0x34 - invalid */
2592/* Opcode VEX.0F 0x35 - invalid */
2593/* Opcode VEX.0F 0x36 - invalid */
2594/* Opcode VEX.0F 0x37 - invalid */
2595/* Opcode VEX.0F 0x38 - invalid */
2596/* Opcode VEX.0F 0x39 - invalid */
2597/* Opcode VEX.0F 0x3a - invalid */
2598/* Opcode VEX.0F 0x3b - invalid */
2599/* Opcode VEX.0F 0x3c - invalid */
2600/* Opcode VEX.0F 0x3d - invalid */
2601/* Opcode VEX.0F 0x3e - invalid */
2602/* Opcode VEX.0F 0x3f - invalid */
2603/* Opcode VEX.0F 0x40 - invalid */
2604/* Opcode VEX.0F 0x41 - invalid */
2605/* Opcode VEX.0F 0x42 - invalid */
2606/* Opcode VEX.0F 0x43 - invalid */
2607/* Opcode VEX.0F 0x44 - invalid */
2608/* Opcode VEX.0F 0x45 - invalid */
2609/* Opcode VEX.0F 0x46 - invalid */
2610/* Opcode VEX.0F 0x47 - invalid */
2611/* Opcode VEX.0F 0x48 - invalid */
2612/* Opcode VEX.0F 0x49 - invalid */
2613/* Opcode VEX.0F 0x4a - invalid */
2614/* Opcode VEX.0F 0x4b - invalid */
2615/* Opcode VEX.0F 0x4c - invalid */
2616/* Opcode VEX.0F 0x4d - invalid */
2617/* Opcode VEX.0F 0x4e - invalid */
2618/* Opcode VEX.0F 0x4f - invalid */
2619
2620
2621/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2622FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2623{
2624 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2626 if (IEM_IS_MODRM_REG_MODE(bRm))
2627 {
2628 /*
2629 * Register, register.
2630 */
2631 if (pVCpu->iem.s.uVexLength == 0)
2632 {
2633 IEMOP_HLP_DONE_VEX_DECODING();
2634 IEM_MC_BEGIN(2, 1);
2635 IEM_MC_LOCAL(uint8_t, u8Dst);
2636 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2637 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2638 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2639 IEM_MC_PREPARE_AVX_USAGE();
2640 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2641 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2642 pu8Dst, puSrc);
2643 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2644 IEM_MC_ADVANCE_RIP();
2645 IEM_MC_END();
2646 }
2647 else
2648 {
2649 IEMOP_HLP_DONE_VEX_DECODING();
2650 IEM_MC_BEGIN(2, 2);
2651 IEM_MC_LOCAL(uint8_t, u8Dst);
2652 IEM_MC_LOCAL(RTUINT256U, uSrc);
2653 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2654 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2655
2656 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2657 IEM_MC_PREPARE_AVX_USAGE();
2658 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2659 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2660 pu8Dst, puSrc);
2661 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2662 IEM_MC_ADVANCE_RIP();
2663 IEM_MC_END();
2664 }
2665 return VINF_SUCCESS;
2666 }
2667
2668 /* No memory operand. */
2669 return IEMOP_RAISE_INVALID_OPCODE();
2670}
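
/*
 * A minimal scalar sketch of what the vmovmskps worker computes for the
 * 128-bit case, assuming the RTUINT128U layout (illustrative helper only):
 *
 *     static uint8_t vmovmskpsU128Ref(PCRTUINT128U puSrc)
 *     {
 *         uint8_t bMask = 0;
 *         for (unsigned i = 0; i < 4; i++)
 *             bMask |= (uint8_t)((puSrc->au32[i] >> 31) << i);  // sign bits
 *         return bMask;
 *     }
 */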
2671
2672
2673/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2674FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2675{
    IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2679 if (IEM_IS_MODRM_REG_MODE(bRm))
2680 {
2681 /*
2682 * Register, register.
2683 */
2684 if (pVCpu->iem.s.uVexLength == 0)
2685 {
2686 IEMOP_HLP_DONE_VEX_DECODING();
2687 IEM_MC_BEGIN(2, 1);
2688 IEM_MC_LOCAL(uint8_t, u8Dst);
2689 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2690 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2691 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2692 IEM_MC_PREPARE_AVX_USAGE();
2693 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2694 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2695 pu8Dst, puSrc);
2696 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2697 IEM_MC_ADVANCE_RIP();
2698 IEM_MC_END();
2699 }
2700 else
2701 {
2702 IEMOP_HLP_DONE_VEX_DECODING();
2703 IEM_MC_BEGIN(2, 2);
2704 IEM_MC_LOCAL(uint8_t, u8Dst);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2707 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2708
2709 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2710 IEM_MC_PREPARE_AVX_USAGE();
2711 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2712 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2713 pu8Dst, puSrc);
2714 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2715 IEM_MC_ADVANCE_RIP();
2716 IEM_MC_END();
2717 }
2718 return VINF_SUCCESS;
2719 }
2720
2721 /* No memory operand. */
2722 return IEMOP_RAISE_INVALID_OPCODE();
}
2725
2726
2727/* Opcode VEX.F3.0F 0x50 - invalid */
2728/* Opcode VEX.F2.0F 0x50 - invalid */
2729
2730/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2731FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2732/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2733FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2734/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2735FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2736/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2737FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2738
2739/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2740FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2741/* Opcode VEX.66.0F 0x52 - invalid */
2742/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2743FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2744/* Opcode VEX.F2.0F 0x52 - invalid */
2745
2746/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2747FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2748/* Opcode VEX.66.0F 0x53 - invalid */
2749/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2750FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2751/* Opcode VEX.F2.0F 0x53 - invalid */
2752
2753
2754/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2755FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2756{
2757 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2758 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2759 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2760}
2761
2762
2763/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2764FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2765{
2766 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2767 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2768 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2769}
2770
2771
2772/* Opcode VEX.F3.0F 0x54 - invalid */
2773/* Opcode VEX.F2.0F 0x54 - invalid */
2774
2775
2776/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2777FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2778{
2779 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2780 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2781 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2782}
2783
2784
2785/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2786FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2787{
2788 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2790 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2791}
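
/*
 * Both vandnps and vandnpd reuse the vpandn worker since the operation is
 * purely bitwise. A minimal 128-bit sketch of the assumed semantics, with
 * the first (VVVV) source operand inverted (illustrative only):
 *
 *     static void vpandnU128Ref(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
 *     {
 *         puDst->au64[0] = ~puSrc1->au64[0] & puSrc2->au64[0];
 *         puDst->au64[1] = ~puSrc1->au64[1] & puSrc2->au64[1];
 *     }
 */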
2792
2793
2794/* Opcode VEX.F3.0F 0x55 - invalid */
2795/* Opcode VEX.F2.0F 0x55 - invalid */
2796
2797/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2798FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2799{
2800 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2801 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2802 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2803}
2804
2805
2806/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2807FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2808{
2809 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2810 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2811 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2812}
2813
2814
2815/* Opcode VEX.F3.0F 0x56 - invalid */
2816/* Opcode VEX.F2.0F 0x56 - invalid */
2817
2818
2819/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2820FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2821{
2822 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2823 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2824 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2825}
2826
2827
2828/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2829FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2830{
2831 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2833 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2834}
2835
2836
2837/* Opcode VEX.F3.0F 0x57 - invalid */
2838/* Opcode VEX.F2.0F 0x57 - invalid */
2839
2840/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2841FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2842/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2843FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2844/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2845FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2846/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2847FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2848
2849/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2850FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2851/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2852FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2853/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2854FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2855/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2856FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2857
2858/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2859FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2860/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2861FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2862/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2863FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2864/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2865FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2866
2867/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2868FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2869/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2870FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2871/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2872FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2873/* Opcode VEX.F2.0F 0x5b - invalid */
2874
2875/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2876FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2877/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2878FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2879/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2880FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2881/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2882FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2883
2884/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2885FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2886/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2887FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2888/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2889FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2890/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2891FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2892
2893/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2894FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2895/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2896FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2897/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2898FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2899/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2900FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2901
2902/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2903FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2904/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2905FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2906/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2907FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2908/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2909FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2910
2911
2912/* Opcode VEX.0F 0x60 - invalid */
2913
2914
2915/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2916FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2917{
2918 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2919 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2920 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2921}
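
/*
 * A minimal 128-bit sketch of the low-byte interleave (for VEX.256 the same
 * pattern is assumed to repeat per 128-bit lane; illustrative helper only):
 *
 *     static void vpunpcklbwU128Ref(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
 *     {
 *         RTUINT128U uTmp;
 *         for (unsigned i = 0; i < 8; i++)
 *         {
 *             uTmp.au8[i * 2]     = puSrc1->au8[i];   // even bytes from src1
 *             uTmp.au8[i * 2 + 1] = puSrc2->au8[i];   // odd bytes from src2
 *         }
 *         *puDst = uTmp;
 *     }
 */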
2922
2923
2924/* Opcode VEX.F3.0F 0x60 - invalid */
2925
2926
2927/* Opcode VEX.0F 0x61 - invalid */
2928
2929
2930/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2931FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2932{
2933 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2934 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2935 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2936}
2937
2938
2939/* Opcode VEX.F3.0F 0x61 - invalid */
2940
2941
2942/* Opcode VEX.0F 0x62 - invalid */
2943
2944/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2945FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2946{
2947 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2948 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2949 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2950}
2951
2952
2953/* Opcode VEX.F3.0F 0x62 - invalid */
2954
2955
2956
2957/* Opcode VEX.0F 0x63 - invalid */
2958
2959
2960/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2961FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2962{
2963 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2964 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2965 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2966}
2967
2968
2969/* Opcode VEX.F3.0F 0x63 - invalid */
2970
2971/* Opcode VEX.0F 0x64 - invalid */
2972
2973
2974/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2975FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2976{
2977 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2978 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2979 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2980}
2981
2982
2983/* Opcode VEX.F3.0F 0x64 - invalid */
2984
2985/* Opcode VEX.0F 0x65 - invalid */
2986
2987
2988/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2989FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2990{
2991 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2992 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2994}
2995
2996
2997/* Opcode VEX.F3.0F 0x65 - invalid */
2998
2999/* Opcode VEX.0F 0x66 - invalid */
3000
3001
3002/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3003FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3004{
3005 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3006 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3008}
3009
3010
3011/* Opcode VEX.F3.0F 0x66 - invalid */
3012
3013/* Opcode VEX.0F 0x67 - invalid */
3014
3015
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3017FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3018{
3019 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3020 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3021 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3022}
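
/*
 * The pack narrows signed words to bytes with unsigned saturation; a
 * minimal sketch of the assumed per-element clamping (illustrative only):
 *
 *     static uint8_t packuswbSatRef(int16_t iSrc)
 *     {
 *         return iSrc < 0 ? 0 : iSrc > 255 ? 255 : (uint8_t)iSrc;
 *     }
 */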
3023
3024
3025/* Opcode VEX.F3.0F 0x67 - invalid */
3026
3027
3028///**
3029// * Common worker for SSE2 instructions on the form:
3030// * pxxxx xmm1, xmm2/mem128
3031// *
3032// * The 2nd operand is the second half of a register, which in the memory case
3033// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3034// * where it may read the full 128 bits or only the upper 64 bits.
3035// *
3036// * Exceptions type 4.
3037// */
3038//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3039//{
3040// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041// if (IEM_IS_MODRM_REG_MODE(bRm))
3042// {
3043// /*
3044// * Register, register.
3045// */
3046// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3047// IEM_MC_BEGIN(2, 0);
3048// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3049// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3050// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3051// IEM_MC_PREPARE_SSE_USAGE();
3052// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3053// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3054// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3055// IEM_MC_ADVANCE_RIP();
3056// IEM_MC_END();
3057// }
3058// else
3059// {
3060// /*
3061// * Register, memory.
3062// */
3063// IEM_MC_BEGIN(2, 2);
3064// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3065// IEM_MC_LOCAL(RTUINT128U, uSrc);
3066// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3067// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3068//
3069// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3070// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3071// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3073//
3074// IEM_MC_PREPARE_SSE_USAGE();
3075// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3076// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3077//
3078// IEM_MC_ADVANCE_RIP();
3079// IEM_MC_END();
3080// }
3081// return VINF_SUCCESS;
3082//}
3083
3084
3085/* Opcode VEX.0F 0x68 - invalid */
3086
3087/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3088FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3089{
3090 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3091 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3093}
3094
3095
3096/* Opcode VEX.F3.0F 0x68 - invalid */
3097
3098
3099/* Opcode VEX.0F 0x69 - invalid */
3100
3101
3102/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3103FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3104{
3105 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3106 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3108}
3109
3110
3111/* Opcode VEX.F3.0F 0x69 - invalid */
3112
3113
3114/* Opcode VEX.0F 0x6a - invalid */
3115
3116
/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3118FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3119{
3120 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3121 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3122 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3123}
3124
3125
3126/* Opcode VEX.F3.0F 0x6a - invalid */
3127
3128
3129/* Opcode VEX.0F 0x6b - invalid */
3130
3131
3132/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3133FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3134{
3135 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3136 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3137 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3138}
3139
3140
3141/* Opcode VEX.F3.0F 0x6b - invalid */
3142
3143
3144/* Opcode VEX.0F 0x6c - invalid */
3145
3146
3147/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3148FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3149{
3150 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3151 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3153}
3154
3155
3156/* Opcode VEX.F3.0F 0x6c - invalid */
3157/* Opcode VEX.F2.0F 0x6c - invalid */
3158
3159
3160/* Opcode VEX.0F 0x6d - invalid */
3161
3162
/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3164FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3165{
3166 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3167 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3168 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3169}
3170
3171
3172/* Opcode VEX.F3.0F 0x6d - invalid */
3173
3174
3175/* Opcode VEX.0F 0x6e - invalid */
3176
3177FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3178{
3179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3181 {
3182 /**
3183 * @opcode 0x6e
3184 * @opcodesub rex.w=1
3185 * @oppfx 0x66
3186 * @opcpuid avx
         * @opgroup    og_avx_simdint_datamove
3188 * @opxcpttype 5
3189 * @optest 64-bit / op1=1 op2=2 -> op1=2
3190 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3191 */
3192 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3193 if (IEM_IS_MODRM_REG_MODE(bRm))
3194 {
3195 /* XMM, greg64 */
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3197 IEM_MC_BEGIN(0, 1);
3198 IEM_MC_LOCAL(uint64_t, u64Tmp);
3199
3200 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3201 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3202
3203 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3204 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3205
3206 IEM_MC_ADVANCE_RIP();
3207 IEM_MC_END();
3208 }
3209 else
3210 {
3211 /* XMM, [mem64] */
3212 IEM_MC_BEGIN(0, 2);
3213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3214 IEM_MC_LOCAL(uint64_t, u64Tmp);
3215
3216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3217 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3218 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3219 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3220
3221 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3222 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3223
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 }
3228 else
3229 {
3230 /**
3231 * @opdone
3232 * @opcode 0x6e
3233 * @opcodesub rex.w=0
3234 * @oppfx 0x66
3235 * @opcpuid avx
         * @opgroup    og_avx_simdint_datamove
3237 * @opxcpttype 5
3238 * @opfunction iemOp_vmovd_q_Vy_Ey
3239 * @optest op1=1 op2=2 -> op1=2
3240 * @optest op1=0 op2=-42 -> op1=-42
3241 */
3242 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3243 if (IEM_IS_MODRM_REG_MODE(bRm))
3244 {
3245 /* XMM, greg32 */
3246 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3247 IEM_MC_BEGIN(0, 1);
3248 IEM_MC_LOCAL(uint32_t, u32Tmp);
3249
3250 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3251 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3252
3253 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3254 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3255
3256 IEM_MC_ADVANCE_RIP();
3257 IEM_MC_END();
3258 }
3259 else
3260 {
3261 /* XMM, [mem32] */
3262 IEM_MC_BEGIN(0, 2);
3263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3264 IEM_MC_LOCAL(uint32_t, u32Tmp);
3265
3266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3267 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3269 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3270
3271 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3272 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3273
3274 IEM_MC_ADVANCE_RIP();
3275 IEM_MC_END();
3276 }
3277 }
3278 return VINF_SUCCESS;
3279}
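
/*
 * Like all VEX-encoded register writes, the destination is zero-extended to
 * VLMAX. A minimal sketch for the rex.w=1 path, assuming a 256-bit VLMAX
 * (illustrative helper only):
 *
 *     static void vmovqZxVlmaxRef(PRTUINT256U puDst, uint64_t uSrc)
 *     {
 *         puDst->au64[0] = uSrc;   // low quadword receives the value
 *         puDst->au64[1] = 0;      // everything above is zeroed
 *         puDst->au64[2] = 0;
 *         puDst->au64[3] = 0;
 *     }
 */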
3280
3281
3282/* Opcode VEX.F3.0F 0x6e - invalid */
3283
3284
3285/* Opcode VEX.0F 0x6f - invalid */
3286
3287/**
3288 * @opcode 0x6f
3289 * @oppfx 0x66
3290 * @opcpuid avx
3291 * @opgroup og_avx_simdint_datamove
3292 * @opxcpttype 1
3293 * @optest op1=1 op2=2 -> op1=2
3294 * @optest op1=0 op2=-42 -> op1=-42
3295 */
3296FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3297{
3298 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3299 Assert(pVCpu->iem.s.uVexLength <= 1);
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 if (IEM_IS_MODRM_REG_MODE(bRm))
3302 {
3303 /*
3304 * Register, register.
3305 */
3306 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3307 IEM_MC_BEGIN(0, 0);
3308
3309 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3310 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3311 if (pVCpu->iem.s.uVexLength == 0)
3312 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3313 IEM_GET_MODRM_RM(pVCpu, bRm));
3314 else
3315 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3316 IEM_GET_MODRM_RM(pVCpu, bRm));
3317 IEM_MC_ADVANCE_RIP();
3318 IEM_MC_END();
3319 }
3320 else if (pVCpu->iem.s.uVexLength == 0)
3321 {
3322 /*
3323 * Register, memory128.
3324 */
3325 IEM_MC_BEGIN(0, 2);
3326 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3328
3329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3330 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3331 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3332 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3333
3334 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3335 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3336
3337 IEM_MC_ADVANCE_RIP();
3338 IEM_MC_END();
3339 }
3340 else
3341 {
3342 /*
3343 * Register, memory256.
3344 */
3345 IEM_MC_BEGIN(0, 2);
3346 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3348
3349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3350 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3352 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3353
3354 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3355 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3356
3357 IEM_MC_ADVANCE_RIP();
3358 IEM_MC_END();
3359 }
3360 return VINF_SUCCESS;
3361}
3362
3363/**
3364 * @opcode 0x6f
3365 * @oppfx 0xf3
3366 * @opcpuid avx
3367 * @opgroup og_avx_simdint_datamove
3368 * @opxcpttype 4UA
3369 * @optest op1=1 op2=2 -> op1=2
3370 * @optest op1=0 op2=-42 -> op1=-42
3371 */
3372FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3373{
3374 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3375 Assert(pVCpu->iem.s.uVexLength <= 1);
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 if (IEM_IS_MODRM_REG_MODE(bRm))
3378 {
3379 /*
3380 * Register, register.
3381 */
3382 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3383 IEM_MC_BEGIN(0, 0);
3384
3385 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3386 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3387 if (pVCpu->iem.s.uVexLength == 0)
3388 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3389 IEM_GET_MODRM_RM(pVCpu, bRm));
3390 else
3391 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3392 IEM_GET_MODRM_RM(pVCpu, bRm));
3393 IEM_MC_ADVANCE_RIP();
3394 IEM_MC_END();
3395 }
3396 else if (pVCpu->iem.s.uVexLength == 0)
3397 {
3398 /*
3399 * Register, memory128.
3400 */
3401 IEM_MC_BEGIN(0, 2);
3402 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3404
3405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3406 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3407 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3408 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3409
3410 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3411 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3412
3413 IEM_MC_ADVANCE_RIP();
3414 IEM_MC_END();
3415 }
3416 else
3417 {
3418 /*
3419 * Register, memory256.
3420 */
3421 IEM_MC_BEGIN(0, 2);
3422 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3424
3425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3426 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3428 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3429
3430 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3431 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3432
3433 IEM_MC_ADVANCE_RIP();
3434 IEM_MC_END();
3435 }
3436 return VINF_SUCCESS;
3437}
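
/*
 * Note: the only difference from vmovdqa above is the use of the plain
 * IEM_MC_FETCH_MEM_U128/U256 fetchers instead of the _ALIGN_ variants, so
 * misaligned operands do not raise #GP here.
 */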
3438
3439
3440/* Opcode VEX.0F 0x70 - invalid */
3441
3442
3443/**
3444 * Common worker for AVX/AVX2 instructions on the forms:
3445 * - vpxxx xmm0, xmm2/mem128, imm8
3446 * - vpxxx ymm0, ymm2/mem256, imm8
3447 *
3448 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3449 */
3450FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3451{
3452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3453 if (IEM_IS_MODRM_REG_MODE(bRm))
3454 {
3455 /*
3456 * Register, register.
3457 */
3458 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3459 if (pVCpu->iem.s.uVexLength)
3460 {
3461 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3462 IEM_MC_BEGIN(3, 2);
3463 IEM_MC_LOCAL(RTUINT256U, uDst);
3464 IEM_MC_LOCAL(RTUINT256U, uSrc);
3465 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3466 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3467 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3468 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3469 IEM_MC_PREPARE_AVX_USAGE();
3470 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3471 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3472 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3473 IEM_MC_ADVANCE_RIP();
3474 IEM_MC_END();
3475 }
3476 else
3477 {
3478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3479 IEM_MC_BEGIN(3, 0);
3480 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3481 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3482 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3483 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3484 IEM_MC_PREPARE_AVX_USAGE();
3485 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3486 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3487 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3488 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 }
3493 else
3494 {
3495 /*
3496 * Register, memory.
3497 */
3498 if (pVCpu->iem.s.uVexLength)
3499 {
3500 IEM_MC_BEGIN(3, 3);
3501 IEM_MC_LOCAL(RTUINT256U, uDst);
3502 IEM_MC_LOCAL(RTUINT256U, uSrc);
3503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3504 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3505 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3506
3507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3508 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3509 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3510 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3511 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3512 IEM_MC_PREPARE_AVX_USAGE();
3513
3514 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3515 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3516 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3517
3518 IEM_MC_ADVANCE_RIP();
3519 IEM_MC_END();
3520 }
3521 else
3522 {
3523 IEM_MC_BEGIN(3, 1);
3524 IEM_MC_LOCAL(RTUINT128U, uSrc);
3525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3526 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3527 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3528
3529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3530 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3531 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3532 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3533 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3534 IEM_MC_PREPARE_AVX_USAGE();
3535
3536 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3537 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3538 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3539 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3540
3541 IEM_MC_ADVANCE_RIP();
3542 IEM_MC_END();
3543 }
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548
3549/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3550FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3551{
3552 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3553 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3554 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
}
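
/*
 * A minimal sketch of the vpshufd selection, assuming the usual two-bit
 * dword indices in the immediate (repeated per 128-bit lane for VEX.256;
 * illustrative helper only):
 *
 *     static void vpshufdU128Ref(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
 *     {
 *         RTUINT128U uTmp;
 *         for (unsigned i = 0; i < 4; i++)
 *             uTmp.au32[i] = puSrc->au32[(bImm >> (i * 2)) & 3];
 *         *puDst = uTmp;
 *     }
 */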
3557
3558
3559/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3560FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3561{
3562 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3563 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3564 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
}
3567
3568
3569/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3570FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3571{
3572 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3573 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3574 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3575}
3576
3577
3578/* Opcode VEX.0F 0x71 11/2 - invalid. */
3579/** Opcode VEX.66.0F 0x71 11/2. */
3580FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3581
3582/* Opcode VEX.0F 0x71 11/4 - invalid */
3583/** Opcode VEX.66.0F 0x71 11/4. */
3584FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3585
3586/* Opcode VEX.0F 0x71 11/6 - invalid */
3587/** Opcode VEX.66.0F 0x71 11/6. */
3588FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3589
3590
3591/**
3592 * VEX Group 12 jump table for register variant.
3593 */
3594IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3595{
3596 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3597 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3598 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3599 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3600 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3601 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3602 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3603 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3604};
3605AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3606
3607
3608/** Opcode VEX.0F 0x71. */
3609FNIEMOP_DEF(iemOp_VGrp12)
3610{
3611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3612 if (IEM_IS_MODRM_REG_MODE(bRm))
3613 /* register, register */
3614 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3615 + pVCpu->iem.s.idxPrefix], bRm);
3616 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3617}
3618
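/* Illustrative sketch, not part of the build: how the VEX group dispatchers
   (groups 12/13/14 here, group 15 further down) form their jump-table index.
   Bits 5:3 of the ModR/M byte give the /0../7 opcode extension and idxPrefix
   (0=none, 1=66h, 2=F3h, 3=F2h) selects the column, matching the 8*4 layout
   checked by the AssertCompile above.  iemExampleVexGroupTableIndex is a
   hypothetical name. */
#if 0
static unsigned iemExampleVexGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* ModR/M reg field, i.e. the /n part. */
    return iReg * 4 + idxPrefix;            /* Row-major index into the 8x4 table. */
}
#endif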
3619
3620/* Opcode VEX.0F 0x72 11/2 - invalid. */
3621/** Opcode VEX.66.0F 0x72 11/2. */
3622FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3623
3624/* Opcode VEX.0F 0x72 11/4 - invalid. */
3625/** Opcode VEX.66.0F 0x72 11/4. */
3626FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3627
3628/* Opcode VEX.0F 0x72 11/6 - invalid. */
3629/** Opcode VEX.66.0F 0x72 11/6. */
3630FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3631
3632
3633/**
3634 * Group 13 jump table for register variant.
3635 */
3636IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3637{
3638 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3639 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3640 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3641 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3642 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3643 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3644 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3645 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3646};
3647AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3648
3649/** Opcode VEX.0F 0x72. */
3650FNIEMOP_DEF(iemOp_VGrp13)
3651{
3652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3653 if (IEM_IS_MODRM_REG_MODE(bRm))
3654 /* register, register */
3655 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3656 + pVCpu->iem.s.idxPrefix], bRm);
3657 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3658}
3659
3660
3661/* Opcode VEX.0F 0x73 11/2 - invalid. */
3662/** Opcode VEX.66.0F 0x73 11/2. */
3663FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3664
3665/** Opcode VEX.66.0F 0x73 11/3. */
3666FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3667
3668/* Opcode VEX.0F 0x73 11/6 - invalid. */
3669/** Opcode VEX.66.0F 0x73 11/6. */
3670FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3671
3672/** Opcode VEX.66.0F 0x73 11/7. */
3673FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3674
3675/**
3676 * Group 14 jump table for register variant.
3677 */
3678IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3679{
3680 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3681 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3682 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3683 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3684 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3685 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3686 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3687 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3688};
3689AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3690
3691
3692/** Opcode VEX.0F 0x73. */
3693FNIEMOP_DEF(iemOp_VGrp14)
3694{
3695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3696 if (IEM_IS_MODRM_REG_MODE(bRm))
3697 /* register, register */
3698 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3699 + pVCpu->iem.s.idxPrefix], bRm);
3700 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3701}
3702
3703
3704/* Opcode VEX.0F 0x74 - invalid */
3705
3706
3707/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3708FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3709{
3710 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3711 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3712 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3713}
3714
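/* Illustrative sketch, not part of the build: the IEM_SELECT_HOST_OR_FALLBACK
   pattern used by vpcmpeqb above (and by most instructions below) picks an
   assembly worker when the host CPU has the required feature and a portable C
   fallback otherwise.  The type and function names here are hypothetical; only
   the selection idea is shown. */
#if 0
typedef void FNEXAMPLEWORKER(void *pvDst, void const *pvSrc1, void const *pvSrc2);
static FNEXAMPLEWORKER *iemExampleSelectWorker(bool fHostHasAvx2,
                                               FNEXAMPLEWORKER *pfnHost,
                                               FNEXAMPLEWORKER *pfnFallback)
{
    return fHostHasAvx2 ? pfnHost : pfnFallback; /* Resolved once at decode time. */
}
#endif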
3715/* Opcode VEX.F3.0F 0x74 - invalid */
3716/* Opcode VEX.F2.0F 0x74 - invalid */
3717
3718
3719/* Opcode VEX.0F 0x75 - invalid */
3720
3721
3722/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3723FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3724{
3725 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3726 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3727 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3728}
3729
3730
3731/* Opcode VEX.F3.0F 0x75 - invalid */
3732/* Opcode VEX.F2.0F 0x75 - invalid */
3733
3734
3735/* Opcode VEX.0F 0x76 - invalid */
3736
3737
3738/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3739FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3740{
3741 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3742 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3743 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3744}
3745
3746
3747/* Opcode VEX.F3.0F 0x76 - invalid */
3748/* Opcode VEX.F2.0F 0x76 - invalid */
3749
3750
3751/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3752FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
3753/* Opcode VEX.66.0F 0x77 - invalid */
3754/* Opcode VEX.F3.0F 0x77 - invalid */
3755/* Opcode VEX.F2.0F 0x77 - invalid */
3756
3757/* Opcode VEX.0F 0x78 - invalid */
3758/* Opcode VEX.66.0F 0x78 - invalid */
3759/* Opcode VEX.F3.0F 0x78 - invalid */
3760/* Opcode VEX.F2.0F 0x78 - invalid */
3761
3762/* Opcode VEX.0F 0x79 - invalid */
3763/* Opcode VEX.66.0F 0x79 - invalid */
3764/* Opcode VEX.F3.0F 0x79 - invalid */
3765/* Opcode VEX.F2.0F 0x79 - invalid */
3766
3767/* Opcode VEX.0F 0x7a - invalid */
3768/* Opcode VEX.66.0F 0x7a - invalid */
3769/* Opcode VEX.F3.0F 0x7a - invalid */
3770/* Opcode VEX.F2.0F 0x7a - invalid */
3771
3772/* Opcode VEX.0F 0x7b - invalid */
3773/* Opcode VEX.66.0F 0x7b - invalid */
3774/* Opcode VEX.F3.0F 0x7b - invalid */
3775/* Opcode VEX.F2.0F 0x7b - invalid */
3776
3777/* Opcode VEX.0F 0x7c - invalid */
3778/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3779FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3780/* Opcode VEX.F3.0F 0x7c - invalid */
3781/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3782FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3783
3784/* Opcode VEX.0F 0x7d - invalid */
3785/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3786FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3787/* Opcode VEX.F3.0F 0x7d - invalid */
3788/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3789FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3790
3791
3792/* Opcode VEX.0F 0x7e - invalid */
3793
3794FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3795{
3796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3797 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3798 {
3799 /**
3800 * @opcode 0x7e
3801 * @opcodesub rex.w=1
3802 * @oppfx 0x66
3803 * @opcpuid avx
3804 * @opgroup og_avx_simdint_datamov
3805 * @opxcpttype 5
3806 * @optest 64-bit / op1=1 op2=2 -> op1=2
3807 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3808 */
3809 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3810 if (IEM_IS_MODRM_REG_MODE(bRm))
3811 {
3812 /* greg64, XMM */
3813 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3814 IEM_MC_BEGIN(0, 1);
3815 IEM_MC_LOCAL(uint64_t, u64Tmp);
3816
3817 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3818 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3819
3820 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3821 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3822
3823 IEM_MC_ADVANCE_RIP();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828 /* [mem64], XMM */
3829 IEM_MC_BEGIN(0, 2);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3831 IEM_MC_LOCAL(uint64_t, u64Tmp);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3834 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3835 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3836 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3837
3838 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3839 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3840
3841 IEM_MC_ADVANCE_RIP();
3842 IEM_MC_END();
3843 }
3844 }
3845 else
3846 {
3847 /**
3848 * @opdone
3849 * @opcode 0x7e
3850 * @opcodesub rex.w=0
3851 * @oppfx 0x66
3852 * @opcpuid avx
3853 * @opgroup og_avx_simdint_datamov
3854 * @opxcpttype 5
3855 * @opfunction iemOp_vmovd_q_Ey_Vy
3856 * @optest op1=1 op2=2 -> op1=2
3857 * @optest op1=0 op2=-42 -> op1=-42
3858 */
3859 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3860 if (IEM_IS_MODRM_REG_MODE(bRm))
3861 {
3862 /* greg32, XMM */
3863 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3864 IEM_MC_BEGIN(0, 1);
3865 IEM_MC_LOCAL(uint32_t, u32Tmp);
3866
3867 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3868 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3869
3870 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3871 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3872
3873 IEM_MC_ADVANCE_RIP();
3874 IEM_MC_END();
3875 }
3876 else
3877 {
3878 /* [mem32], XMM */
3879 IEM_MC_BEGIN(0, 2);
3880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3881 IEM_MC_LOCAL(uint32_t, u32Tmp);
3882
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3884 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3887
3888 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3889 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3890
3891 IEM_MC_ADVANCE_RIP();
3892 IEM_MC_END();
3893 }
3894 }
3895 return VINF_SUCCESS;
3896}
3897
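/* Illustrative sketch, not part of the build: the decoder above keys on the
   REX.W/VEX.W prefix flag to pick between the 64-bit VMOVQ and the 32-bit
   VMOVD store forms.  A simplified model of the value actually written, using
   a plain integer for the low XMM bits: */
#if 0
static uint64_t iemExampleVmovdqValue(uint64_t u64XmmLo, bool fVexW)
{
    /* W=1: move the low qword; W=0: move only the low dword. */
    return fVexW ? u64XmmLo : (uint64_t)(uint32_t)u64XmmLo;
}
#endif
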
3898/**
3899 * @opcode 0x7e
3900 * @oppfx 0xf3
3901 * @opcpuid avx
3902 * @opgroup og_avx_pcksclr_datamove
3903 * @opxcpttype none
3904 * @optest op1=1 op2=2 -> op1=2
3905 * @optest op1=0 op2=-42 -> op1=-42
3906 */
3907FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3908{
3909 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3911 if (IEM_IS_MODRM_REG_MODE(bRm))
3912 {
3913 /*
3914 * Register, register.
3915 */
3916 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3917 IEM_MC_BEGIN(0, 0);
3918
3919 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3920 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3921
3922 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3923 IEM_GET_MODRM_RM(pVCpu, bRm));
3924 IEM_MC_ADVANCE_RIP();
3925 IEM_MC_END();
3926 }
3927 else
3928 {
3929 /*
3930 * Memory, register.
3931 */
3932 IEM_MC_BEGIN(0, 2);
3933 IEM_MC_LOCAL(uint64_t, uSrc);
3934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3935
3936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3937 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3938 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3939 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3940
3941 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3942 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3943
3944 IEM_MC_ADVANCE_RIP();
3945 IEM_MC_END();
3946 }
3947 return VINF_SUCCESS;
3949}

3950/* Opcode VEX.F2.0F 0x7e - invalid */
3951
3952
3953/* Opcode VEX.0F 0x7f - invalid */
3954
3955/**
3956 * @opcode 0x7f
3957 * @oppfx 0x66
3958 * @opcpuid avx
3959 * @opgroup og_avx_simdint_datamove
3960 * @opxcpttype 1
3961 * @optest op1=1 op2=2 -> op1=2
3962 * @optest op1=0 op2=-42 -> op1=-42
3963 */
3964FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3965{
3966 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3967 Assert(pVCpu->iem.s.uVexLength <= 1);
3968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3969 if (IEM_IS_MODRM_REG_MODE(bRm))
3970 {
3971 /*
3972 * Register, register.
3973 */
3974 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3975 IEM_MC_BEGIN(0, 0);
3976
3977 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3978 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3979 if (pVCpu->iem.s.uVexLength == 0)
3980 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3981 IEM_GET_MODRM_REG(pVCpu, bRm));
3982 else
3983 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3984 IEM_GET_MODRM_REG(pVCpu, bRm));
3985 IEM_MC_ADVANCE_RIP();
3986 IEM_MC_END();
3987 }
3988 else if (pVCpu->iem.s.uVexLength == 0)
3989 {
3990 /*
3991 * Register, memory128.
3992 */
3993 IEM_MC_BEGIN(0, 2);
3994 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3996
3997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3998 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3999 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4000 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4001
4002 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4003 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4004
4005 IEM_MC_ADVANCE_RIP();
4006 IEM_MC_END();
4007 }
4008 else
4009 {
4010 /*
4011 * Register, memory256.
4012 */
4013 IEM_MC_BEGIN(0, 2);
4014 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4016
4017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4018 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4019 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4020 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4021
4022 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4023 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4024
4025 IEM_MC_ADVANCE_RIP();
4026 IEM_MC_END();
4027 }
4028 return VINF_SUCCESS;
4029}
4030
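/* Illustrative sketch, not part of the build: the _ALIGN_SSE/_ALIGN_AVX store
   variants used by vmovdqa above enforce natural alignment (16 bytes for
   VL=128, 32 bytes for VL=256), whereas the plain stores used by vmovdqu below
   accept any address.  A simplified model of that check: */
#if 0
static bool iemExampleIsMovdqaAligned(uint64_t GCPtrEff, bool f256Bit)
{
    unsigned const cbAlign = f256Bit ? 32 : 16;
    return (GCPtrEff & (cbAlign - 1)) == 0; /* Misaligned -> #GP(0). */
}
#endif
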
4031/**
4032 * @opcode 0x7f
4033 * @oppfx 0xf3
4034 * @opcpuid avx
4035 * @opgroup og_avx_simdint_datamove
4036 * @opxcpttype 4UA
4037 * @optest op1=1 op2=2 -> op1=2
4038 * @optest op1=0 op2=-42 -> op1=-42
4039 */
4040FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4041{
4042 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4043 Assert(pVCpu->iem.s.uVexLength <= 1);
4044 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4045 if (IEM_IS_MODRM_REG_MODE(bRm))
4046 {
4047 /*
4048 * Register, register.
4049 */
4050 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4051 IEM_MC_BEGIN(0, 0);
4052
4053 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4054 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4055 if (pVCpu->iem.s.uVexLength == 0)
4056 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4057 IEM_GET_MODRM_REG(pVCpu, bRm));
4058 else
4059 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4060 IEM_GET_MODRM_REG(pVCpu, bRm));
4061 IEM_MC_ADVANCE_RIP();
4062 IEM_MC_END();
4063 }
4064 else if (pVCpu->iem.s.uVexLength == 0)
4065 {
4066 /*
4067 * Register, memory128.
4068 */
4069 IEM_MC_BEGIN(0, 2);
4070 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4072
4073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4074 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4075 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4076 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4077
4078 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4079 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4080
4081 IEM_MC_ADVANCE_RIP();
4082 IEM_MC_END();
4083 }
4084 else
4085 {
4086 /*
4087 * Register, memory256.
4088 */
4089 IEM_MC_BEGIN(0, 2);
4090 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4092
4093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4094 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4096 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4097
4098 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4099 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4100
4101 IEM_MC_ADVANCE_RIP();
4102 IEM_MC_END();
4103 }
4104 return VINF_SUCCESS;
4105}
4106
4107/* Opcode VEX.F2.0F 0x7f - invalid */
4108
4109
4110/* Opcode VEX.0F 0x80 - invalid */
4111/* Opcode VEX.0F 0x81 - invalid */
4112/* Opcode VEX.0F 0x82 - invalid */
4113/* Opcode VEX.0F 0x83 - invalid */
4114/* Opcode VEX.0F 0x84 - invalid */
4115/* Opcode VEX.0F 0x85 - invalid */
4116/* Opcode VEX.0F 0x86 - invalid */
4117/* Opcode VEX.0F 0x87 - invalid */
4118/* Opcode VEX.0F 0x88 - invalid */
4119/* Opcode VEX.0F 0x89 - invalid */
4120/* Opcode VEX.0F 0x8a - invalid */
4121/* Opcode VEX.0F 0x8b - invalid */
4122/* Opcode VEX.0F 0x8c - invalid */
4123/* Opcode VEX.0F 0x8d - invalid */
4124/* Opcode VEX.0F 0x8e - invalid */
4125/* Opcode VEX.0F 0x8f - invalid */
4126/* Opcode VEX.0F 0x90 - invalid */
4127/* Opcode VEX.0F 0x91 - invalid */
4128/* Opcode VEX.0F 0x92 - invalid */
4129/* Opcode VEX.0F 0x93 - invalid */
4130/* Opcode VEX.0F 0x94 - invalid */
4131/* Opcode VEX.0F 0x95 - invalid */
4132/* Opcode VEX.0F 0x96 - invalid */
4133/* Opcode VEX.0F 0x97 - invalid */
4134/* Opcode VEX.0F 0x98 - invalid */
4135/* Opcode VEX.0F 0x99 - invalid */
4136/* Opcode VEX.0F 0x9a - invalid */
4137/* Opcode VEX.0F 0x9b - invalid */
4138/* Opcode VEX.0F 0x9c - invalid */
4139/* Opcode VEX.0F 0x9d - invalid */
4140/* Opcode VEX.0F 0x9e - invalid */
4141/* Opcode VEX.0F 0x9f - invalid */
4142/* Opcode VEX.0F 0xa0 - invalid */
4143/* Opcode VEX.0F 0xa1 - invalid */
4144/* Opcode VEX.0F 0xa2 - invalid */
4145/* Opcode VEX.0F 0xa3 - invalid */
4146/* Opcode VEX.0F 0xa4 - invalid */
4147/* Opcode VEX.0F 0xa5 - invalid */
4148/* Opcode VEX.0F 0xa6 - invalid */
4149/* Opcode VEX.0F 0xa7 - invalid */
4150/* Opcode VEX.0F 0xa8 - invalid */
4151/* Opcode VEX.0F 0xa9 - invalid */
4152/* Opcode VEX.0F 0xaa - invalid */
4153/* Opcode VEX.0F 0xab - invalid */
4154/* Opcode VEX.0F 0xac - invalid */
4155/* Opcode VEX.0F 0xad - invalid */
4156
4157
4158/* Opcode VEX.0F 0xae mem/0 - invalid. */
4159/* Opcode VEX.0F 0xae mem/1 - invalid. */
4160
4161/**
4162 * @ opmaps grp15
4163 * @ opcode !11/2
4164 * @ oppfx none
4165 * @ opcpuid sse
4166 * @ opgroup og_sse_mxcsrsm
4167 * @ opxcpttype 5
4168 * @ optest op1=0 -> mxcsr=0
4169 * @ optest op1=0x2083 -> mxcsr=0x2083
4170 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4171 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4172 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4173 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4174 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4175 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4176 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4177 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4178 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4179 */
4180FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4181//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4182//{
4183// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4184// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
4185// return IEMOP_RAISE_INVALID_OPCODE();
4186//
4187// IEM_MC_BEGIN(2, 0);
4188// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4189// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4190// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4191// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4193// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4194// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4195// IEM_MC_END();
4196// return VINF_SUCCESS;
4197//}
4198
4199
4200/**
4201 * @opmaps vexgrp15
4202 * @opcode !11/3
4203 * @oppfx none
4204 * @opcpuid avx
4205 * @opgroup og_avx_mxcsrsm
4206 * @opxcpttype 5
4207 * @optest mxcsr=0 -> op1=0
4208 * @optest mxcsr=0x2083 -> op1=0x2083
4209 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4210 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4211 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4212 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4213 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4214 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4215 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4216 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4217 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4218 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4219 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4220 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4221 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4222 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4223 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4224 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4225 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4226 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4227 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4228 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4229 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4230 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4231 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4232 * -> value.xcpt=0x6
4233 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4234 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4235 * APMv4 rev 3.17 page 509.
4236 * @todo Test this instruction on AMD Ryzen.
4237 */
4238FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4239{
4240 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4241 IEM_MC_BEGIN(2, 0);
4242 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4243 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4245 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4247 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4248 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4249 IEM_MC_END();
4250 return VINF_SUCCESS;
4251}
4252
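/* Illustrative sketch, not part of the build: once the exception conditions
   listed in the @optest lines above have been checked, the iemCImpl_vstmxcsr
   worker invoked here boils down to storing the guest's 32-bit MXCSR at the
   effective address.  A simplified model with a hypothetical name: */
#if 0
static void iemExampleStmxcsr(uint32_t *pu32Dst, uint32_t uGuestMxCsr)
{
    *pu32Dst = uGuestMxCsr; /* VSTMXCSR writes exactly one dword. */
}
#endif
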
4253/* Opcode VEX.0F 0xae mem/4 - invalid. */
4254/* Opcode VEX.0F 0xae mem/5 - invalid. */
4255/* Opcode VEX.0F 0xae mem/6 - invalid. */
4256/* Opcode VEX.0F 0xae mem/7 - invalid. */
4257
4258/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4259/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4260/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4261/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4262/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4263/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4264/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4265/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4266
4267/**
4268 * Vex group 15 jump table for memory variant.
4269 */
4270IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4271{ /* pfx: none, 066h, 0f3h, 0f2h */
4272 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4273 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4274 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4275 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4276 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4277 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4278 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4279 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4280};
4281AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4282
4283
4284/** Opcode VEX.0F 0xae. */
4285FNIEMOP_DEF(iemOp_VGrp15)
4286{
4287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4288 if (IEM_IS_MODRM_REG_MODE(bRm))
4289 /* register, register */
4290 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4291
4292 /* memory, register */
4293 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4294 + pVCpu->iem.s.idxPrefix], bRm);
4295}
4296
4297
4298/* Opcode VEX.0F 0xaf - invalid. */
4299
4300/* Opcode VEX.0F 0xb0 - invalid. */
4301/* Opcode VEX.0F 0xb1 - invalid. */
4302/* Opcode VEX.0F 0xb2 - invalid. */
4304/* Opcode VEX.0F 0xb3 - invalid. */
4305/* Opcode VEX.0F 0xb4 - invalid. */
4306/* Opcode VEX.0F 0xb5 - invalid. */
4307/* Opcode VEX.0F 0xb6 - invalid. */
4308/* Opcode VEX.0F 0xb7 - invalid. */
4309/* Opcode VEX.0F 0xb8 - invalid. */
4310/* Opcode VEX.0F 0xb9 - invalid. */
4311/* Opcode VEX.0F 0xba - invalid. */
4312/* Opcode VEX.0F 0xbb - invalid. */
4313/* Opcode VEX.0F 0xbc - invalid. */
4314/* Opcode VEX.0F 0xbd - invalid. */
4315/* Opcode VEX.0F 0xbe - invalid. */
4316/* Opcode VEX.0F 0xbf - invalid. */
4317
4318/* Opcode VEX.0F 0xc0 - invalid. */
4319/* Opcode VEX.66.0F 0xc0 - invalid. */
4320/* Opcode VEX.F3.0F 0xc0 - invalid. */
4321/* Opcode VEX.F2.0F 0xc0 - invalid. */
4322
4323/* Opcode VEX.0F 0xc1 - invalid. */
4324/* Opcode VEX.66.0F 0xc1 - invalid. */
4325/* Opcode VEX.F3.0F 0xc1 - invalid. */
4326/* Opcode VEX.F2.0F 0xc1 - invalid. */
4327
4328/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4329FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4330/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4331FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4332/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4333FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4334/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4335FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4336
4337/* Opcode VEX.0F 0xc3 - invalid */
4338/* Opcode VEX.66.0F 0xc3 - invalid */
4339/* Opcode VEX.F3.0F 0xc3 - invalid */
4340/* Opcode VEX.F2.0F 0xc3 - invalid */
4341
4342/* Opcode VEX.0F 0xc4 - invalid */
4343
4344
4345/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4346FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4347{
4348 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4350 if (IEM_IS_MODRM_REG_MODE(bRm))
4351 {
4352 /*
4353 * Register, register.
4354 */
4355 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4356 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4357 IEM_MC_BEGIN(4, 0);
4358 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4359 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4360 IEM_MC_ARG(uint16_t, u16Src, 2);
4361 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4362 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4363 IEM_MC_PREPARE_AVX_USAGE();
4364 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4365 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4366 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4367 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4368 puDst, puSrc, u16Src, bEvilArg);
4369 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4370 IEM_MC_ADVANCE_RIP();
4371 IEM_MC_END();
4372 }
4373 else
4374 {
4375 /*
4376 * Register, memory.
4377 */
4378 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4379 IEM_MC_BEGIN(4, 1);
4380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4381 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4382 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4383 IEM_MC_ARG(uint16_t, u16Src, 2);
4384 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4385
4386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4387 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4388 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4389 IEM_MC_PREPARE_AVX_USAGE();
4390
4391 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4392 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4393 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4394 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4395 puDst, puSrc, u16Src, bEvilArg);
4396 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4397
4398 IEM_MC_ADVANCE_RIP();
4399 IEM_MC_END();
4400 }
4401
4402 return VINF_SUCCESS;
4403}
4404
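/* Illustrative sketch, not part of the build: architecturally, VPINSRW copies
   the first source register and replaces one 16-bit element, selected by the
   low three bits of the immediate, with the general-register or memory word.
   A simplified model on plain uint16_t arrays: */
#if 0
static void iemExampleVpinsrwU128(uint16_t au16Dst[8], uint16_t const au16Src[8],
                                  uint16_t u16Val, uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        au16Dst[i] = au16Src[i];
    au16Dst[bImm & 7] = u16Val; /* Only bits 2:0 of the immediate are used. */
}
#endif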
4405
4406/* Opcode VEX.F3.0F 0xc4 - invalid */
4407/* Opcode VEX.F2.0F 0xc4 - invalid */
4408
4409/* Opcode VEX.0F 0xc5 - invalid */
4410
4411
4412/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4413FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4414{
4415 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4417 if (IEM_IS_MODRM_REG_MODE(bRm))
4418 {
4419 /*
4420 * Register, register.
4421 */
4422 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4423 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4424 IEM_MC_BEGIN(3, 1);
4425 IEM_MC_LOCAL(uint16_t, u16Dst);
4426 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4427 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4428 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4429 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4430 IEM_MC_PREPARE_AVX_USAGE();
4431 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4432 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4433 pu16Dst, puSrc, bEvilArg);
4434 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4435 IEM_MC_ADVANCE_RIP();
4436 IEM_MC_END();
4437 return VINF_SUCCESS;
4438 }
4439
4440 /* No memory operand. */
4441 return IEMOP_RAISE_INVALID_OPCODE();
4442}
4443
4444
4445/* Opcode VEX.F3.0F 0xc5 - invalid */
4446/* Opcode VEX.F2.0F 0xc5 - invalid */
4447
4448
4449#define VSHUFP_X(a_Instr) \
4450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4451 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4452 { \
4453 /* \
4454 * Register, register. \
4455 */ \
4456 if (pVCpu->iem.s.uVexLength) \
4457 { \
4458 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4459 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4460 IEM_MC_BEGIN(4, 3); \
4461 IEM_MC_LOCAL(RTUINT256U, uDst); \
4462 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4463 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4464 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4465 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4466 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4467 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4468 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4469 IEM_MC_PREPARE_AVX_USAGE(); \
4470 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4471 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4472 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4473 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4474 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4475 IEM_MC_ADVANCE_RIP(); \
4476 IEM_MC_END(); \
4477 } \
4478 else \
4479 { \
4480 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4481 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4482 IEM_MC_BEGIN(4, 0); \
4483 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4484 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4485 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4486 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4487 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4488 IEM_MC_PREPARE_AVX_USAGE(); \
4489 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4490 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4491 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4492 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4493 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4494 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4495 IEM_MC_ADVANCE_RIP(); \
4496 IEM_MC_END(); \
4497 } \
4498 } \
4499 else \
4500 { \
4501 /* \
4502 * Register, memory. \
4503 */ \
4504 if (pVCpu->iem.s.uVexLength) \
4505 { \
4506 IEM_MC_BEGIN(4, 4); \
4507 IEM_MC_LOCAL(RTUINT256U, uDst); \
4508 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4509 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4511 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4512 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4513 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4515 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4516 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4517 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4518 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4519 IEM_MC_PREPARE_AVX_USAGE(); \
4520 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4521 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4522 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4523 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4524 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4525 IEM_MC_ADVANCE_RIP(); \
4526 IEM_MC_END(); \
4527 } \
4528 else \
4529 { \
4530 IEM_MC_BEGIN(4, 2); \
4531 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4533 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4534 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4535 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4537 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4538 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4539 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4540 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4541 IEM_MC_PREPARE_AVX_USAGE(); \
4542 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4543 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4544 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4545 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4546 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4547 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4548 IEM_MC_ADVANCE_RIP(); \
4549 IEM_MC_END(); \
4550 } \
4551 } \
4552 return VINF_SUCCESS;
4553
4554/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4555FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4556{
4557 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4558 VSHUFP_X(vshufps);
4559}
4560
4561
4562/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4563FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4564{
4565 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4566 VSHUFP_X(vshufpd);
4567}
4568#undef VSHUFP_X
4569
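/* Illustrative sketch, not part of the build: per 128-bit lane, the immediate
   decoded by VSHUFP_X above lets vshufps take the two low result dwords from
   the first source and the two high ones from the second, two selector bits
   each.  Simplified single-lane model: */
#if 0
static void iemExampleShufpsLane(uint32_t au32Dst[4], uint32_t const au32Src1[4],
                                 uint32_t const au32Src2[4], uint8_t bImm)
{
    au32Dst[0] = au32Src1[ bImm       & 3];
    au32Dst[1] = au32Src1[(bImm >> 2) & 3];
    au32Dst[2] = au32Src2[(bImm >> 4) & 3];
    au32Dst[3] = au32Src2[(bImm >> 6) & 3];
}
#endif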
4570
4571/* Opcode VEX.F3.0F 0xc6 - invalid */
4572/* Opcode VEX.F2.0F 0xc6 - invalid */
4573
4574/* Opcode VEX.0F 0xc7 - invalid */
4575/* Opcode VEX.66.0F 0xc7 - invalid */
4576/* Opcode VEX.F3.0F 0xc7 - invalid */
4577/* Opcode VEX.F2.0F 0xc7 - invalid */
4578
4579/* Opcode VEX.0F 0xc8 - invalid */
4580/* Opcode VEX.0F 0xc9 - invalid */
4581/* Opcode VEX.0F 0xca - invalid */
4582/* Opcode VEX.0F 0xcb - invalid */
4583/* Opcode VEX.0F 0xcc - invalid */
4584/* Opcode VEX.0F 0xcd - invalid */
4585/* Opcode VEX.0F 0xce - invalid */
4586/* Opcode VEX.0F 0xcf - invalid */
4587
4588
4589/* Opcode VEX.0F 0xd0 - invalid */
4590/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4591FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4592/* Opcode VEX.F3.0F 0xd0 - invalid */
4593/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4594FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4595
4596/* Opcode VEX.0F 0xd1 - invalid */
4597/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4598FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4599/* Opcode VEX.F3.0F 0xd1 - invalid */
4600/* Opcode VEX.F2.0F 0xd1 - invalid */
4601
4602/* Opcode VEX.0F 0xd2 - invalid */
4603/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4604FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4605/* Opcode VEX.F3.0F 0xd2 - invalid */
4606/* Opcode VEX.F2.0F 0xd2 - invalid */
4607
4608/* Opcode VEX.0F 0xd3 - invalid */
4609/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4610FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4611/* Opcode VEX.F3.0F 0xd3 - invalid */
4612/* Opcode VEX.F2.0F 0xd3 - invalid */
4613
4614/* Opcode VEX.0F 0xd4 - invalid */
4615
4616
4617/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4618FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4619{
4620 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4621 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4622 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4623}
4624
4625
4626/* Opcode VEX.F3.0F 0xd4 - invalid */
4627/* Opcode VEX.F2.0F 0xd4 - invalid */
4628
4629/* Opcode VEX.0F 0xd5 - invalid */
4630
4631
4632/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4633FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4634{
4635 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4636 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4637 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4638}
4639
4640
4641/* Opcode VEX.F3.0F 0xd5 - invalid */
4642/* Opcode VEX.F2.0F 0xd5 - invalid */
4643
4644/* Opcode VEX.0F 0xd6 - invalid */
4645
4646/**
4647 * @opcode 0xd6
4648 * @oppfx 0x66
4649 * @opcpuid avx
4650 * @opgroup og_avx_pcksclr_datamove
4651 * @opxcpttype none
4652 * @optest op1=-1 op2=2 -> op1=2
4653 * @optest op1=0 op2=-42 -> op1=-42
4654 */
4655FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4656{
4657 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4658 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4659 if (IEM_IS_MODRM_REG_MODE(bRm))
4660 {
4661 /*
4662 * Register, register.
4663 */
4664 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4665 IEM_MC_BEGIN(0, 0);
4666
4667 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4668 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4669
4670 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4671 IEM_GET_MODRM_REG(pVCpu, bRm));
4672 IEM_MC_ADVANCE_RIP();
4673 IEM_MC_END();
4674 }
4675 else
4676 {
4677 /*
4678 * Memory, register.
4679 */
4680 IEM_MC_BEGIN(0, 2);
4681 IEM_MC_LOCAL(uint64_t, uSrc);
4682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4683
4684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4685 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4686 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4687 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4688
4689 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4690 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4691
4692 IEM_MC_ADVANCE_RIP();
4693 IEM_MC_END();
4694 }
4695 return VINF_SUCCESS;
4696}
4697
4698/* Opcode VEX.F3.0F 0xd6 - invalid */
4699/* Opcode VEX.F2.0F 0xd6 - invalid */
4700
4701
4702/* Opcode VEX.0F 0xd7 - invalid */
4703
4704/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4705FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4706{
4707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4708 /* Docs say register only. */
4709 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4710 {
4711 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4712 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4713 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4714 if (pVCpu->iem.s.uVexLength)
4715 {
4716 IEM_MC_BEGIN(2, 1);
4717 IEM_MC_ARG(uint64_t *, puDst, 0);
4718 IEM_MC_LOCAL(RTUINT256U, uSrc);
4719 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4720 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4721 IEM_MC_PREPARE_AVX_USAGE();
4722 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4723 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4724 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4725 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 IEM_MC_BEGIN(2, 0);
4732 IEM_MC_ARG(uint64_t *, puDst, 0);
4733 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4734 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4735 IEM_MC_PREPARE_AVX_USAGE();
4736 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4737 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4738 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4739 IEM_MC_ADVANCE_RIP();
4740 IEM_MC_END();
4741 }
4742 return VINF_SUCCESS;
4743 }
4744 return IEMOP_RAISE_INVALID_OPCODE();
4745}
4746
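/* Illustrative sketch, not part of the build: the vpmovmskb workers called
   above gather the most significant bit of every source byte into the low
   bits of the destination GPR (16 bits for VL=128, 32 bits for VL=256), with
   the remaining bits zeroed.  Simplified model: */
#if 0
static uint32_t iemExamplePmovmskb(uint8_t const *pabSrc, unsigned cbSrc /* 16 or 32 */)
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < cbSrc; i++)
        fMask |= (uint32_t)(pabSrc[i] >> 7) << i;
    return fMask;
}
#endif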
4747
4748/* Opcode VEX.F3.0F 0xd7 - invalid */
4749/* Opcode VEX.F2.0F 0xd7 - invalid */
4750
4751
4752/* Opcode VEX.0F 0xd8 - invalid */
4753/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4754FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4755/* Opcode VEX.F3.0F 0xd8 - invalid */
4756/* Opcode VEX.F2.0F 0xd8 - invalid */
4757
4758/* Opcode VEX.0F 0xd9 - invalid */
4759/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4760FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4761/* Opcode VEX.F3.0F 0xd9 - invalid */
4762/* Opcode VEX.F2.0F 0xd9 - invalid */
4763
4764/* Opcode VEX.0F 0xda - invalid */
4765
4766
4767/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4768FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4769{
4770 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4771 IEMOPMEDIAF3_INIT_VARS(vpminub);
4772 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4773}
4774
4775
4776/* Opcode VEX.F3.0F 0xda - invalid */
4777/* Opcode VEX.F2.0F 0xda - invalid */
4778
4779/* Opcode VEX.0F 0xdb - invalid */
4780
4781
4782/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4783FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4784{
4785 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4786 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4787 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4788}
4789
4790
4791/* Opcode VEX.F3.0F 0xdb - invalid */
4792/* Opcode VEX.F2.0F 0xdb - invalid */
4793
4794/* Opcode VEX.0F 0xdc - invalid */
4795/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4796FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4797/* Opcode VEX.F3.0F 0xdc - invalid */
4798/* Opcode VEX.F2.0F 0xdc - invalid */
4799
4800/* Opcode VEX.0F 0xdd - invalid */
4801/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4802FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4803/* Opcode VEX.F3.0F 0xdd - invalid */
4804/* Opcode VEX.F2.0F 0xdd - invalid */
4805
4806/* Opcode VEX.0F 0xde - invalid */
4807
4808
4809/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4810FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4811{
4812 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4813 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4814 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4815}
4816
4817
4818/* Opcode VEX.F3.0F 0xde - invalid */
4819/* Opcode VEX.F2.0F 0xde - invalid */
4820
4821/* Opcode VEX.0F 0xdf - invalid */
4822
4823
4824/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4825FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4826{
4827 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4828 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4829 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4830}
4831
4832
4833/* Opcode VEX.F3.0F 0xdf - invalid */
4834/* Opcode VEX.F2.0F 0xdf - invalid */
4835
4836/* Opcode VEX.0F 0xe0 - invalid */
4837
4838
4839/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4840FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4841{
4842 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4843 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4844 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4845}
4846
4847
4848/* Opcode VEX.F3.0F 0xe0 - invalid */
4849/* Opcode VEX.F2.0F 0xe0 - invalid */
4850
4851/* Opcode VEX.0F 0xe1 - invalid */
4852/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4853FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4854/* Opcode VEX.F3.0F 0xe1 - invalid */
4855/* Opcode VEX.F2.0F 0xe1 - invalid */
4856
4857/* Opcode VEX.0F 0xe2 - invalid */
4858/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4859FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4860/* Opcode VEX.F3.0F 0xe2 - invalid */
4861/* Opcode VEX.F2.0F 0xe2 - invalid */
4862
4863/* Opcode VEX.0F 0xe3 - invalid */
4864
4865
4866/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4867FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4868{
4869 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4870 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4871 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4872}
4873
4874
4875/* Opcode VEX.F3.0F 0xe3 - invalid */
4876/* Opcode VEX.F2.0F 0xe3 - invalid */
4877
4878/* Opcode VEX.0F 0xe4 - invalid */
4879
4880
4881/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4882FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4883{
4884 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4885 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4887}
4888
4889
4890/* Opcode VEX.F3.0F 0xe4 - invalid */
4891/* Opcode VEX.F2.0F 0xe4 - invalid */
4892
4893/* Opcode VEX.0F 0xe5 - invalid */
4894
4895
4896/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4897FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4898{
4899 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4900 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4901 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4902}
4903
4904
4905/* Opcode VEX.F3.0F 0xe5 - invalid */
4906/* Opcode VEX.F2.0F 0xe5 - invalid */
4907
4908/* Opcode VEX.0F 0xe6 - invalid */
4909/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4910FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4911/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4912FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4913/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4914FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4915
4916
4917/* Opcode VEX.0F 0xe7 - invalid */
4918
4919/**
4920 * @opcode 0xe7
4921 * @opcodesub !11 mr/reg
4922 * @oppfx 0x66
4923 * @opcpuid avx
4924 * @opgroup og_avx_cachect
4925 * @opxcpttype 1
4926 * @optest op1=-1 op2=2 -> op1=2
4927 * @optest op1=0 op2=-42 -> op1=-42
4928 */
4929FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4930{
4931 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4932 Assert(pVCpu->iem.s.uVexLength <= 1);
4933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4934 if (IEM_IS_MODRM_MEM_MODE(bRm))
4935 {
4936 if (pVCpu->iem.s.uVexLength == 0)
4937 {
4938 /*
4939 * 128-bit: Memory, register.
4940 */
4941 IEM_MC_BEGIN(0, 2);
4942 IEM_MC_LOCAL(RTUINT128U, uSrc);
4943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4944
4945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4949
4950 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4951 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4952
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 }
4956 else
4957 {
4958 /*
4959 * 256-bit: Memory, register.
4960 */
4961 IEM_MC_BEGIN(0, 2);
4962 IEM_MC_LOCAL(RTUINT256U, uSrc);
4963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4964
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4966 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4967 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4968 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4969
4970 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4971 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4972
4973 IEM_MC_ADVANCE_RIP();
4974 IEM_MC_END();
4975 }
4976 return VINF_SUCCESS;
4977 }
4978 /**
4979 * @opdone
4980 * @opmnemonic udvex660fe7reg
4981 * @opcode 0xe7
4982 * @opcodesub 11 mr/reg
4983 * @oppfx 0x66
4984 * @opunused immediate
4985 * @opcpuid avx
4986 * @optest ->
4987 */
4988 return IEMOP_RAISE_INVALID_OPCODE();
4989}
4990
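/* Illustrative sketch, not part of the build: VMOVNTDQ is a non-temporal
   store, i.e. a hint to bypass the cache hierarchy, which the emulation above
   implements as an ordinary aligned store.  On an SSE2-capable host the hint
   could be honoured with the streaming-store intrinsic from emmintrin.h: */
#if 0
#include <emmintrin.h>
static void iemExampleNtStore128(__m128i *pDst /* 16-byte aligned */, __m128i uVal)
{
    _mm_stream_si128(pDst, uVal); /* Write-combining store, no cache-line fill. */
}
#endif
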
4991/* Opcode VEX.F3.0F 0xe7 - invalid */
4992/* Opcode VEX.F2.0F 0xe7 - invalid */
4993
4994
4995/* Opcode VEX.0F 0xe8 - invalid */
4996/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
4997FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
4998/* Opcode VEX.F3.0F 0xe8 - invalid */
4999/* Opcode VEX.F2.0F 0xe8 - invalid */
5000
5001/* Opcode VEX.0F 0xe9 - invalid */
5002/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5003FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
5004/* Opcode VEX.F3.0F 0xe9 - invalid */
5005/* Opcode VEX.F2.0F 0xe9 - invalid */
5006
5007/* Opcode VEX.0F 0xea - invalid */
5008
5009
5010/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5011FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5012{
5013 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5014 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5015 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5016}
5017
5018
5019/* Opcode VEX.F3.0F 0xea - invalid */
5020/* Opcode VEX.F2.0F 0xea - invalid */
5021
5022/* Opcode VEX.0F 0xeb - invalid */
5023
5024
5025/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5026FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5027{
5028 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5029 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5030 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5031}
5032
5033
5035/* Opcode VEX.F3.0F 0xeb - invalid */
5036/* Opcode VEX.F2.0F 0xeb - invalid */
5037
5038/* Opcode VEX.0F 0xec - invalid */
5039/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5040FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
5041/* Opcode VEX.F3.0F 0xec - invalid */
5042/* Opcode VEX.F2.0F 0xec - invalid */
5043
5044/* Opcode VEX.0F 0xed - invalid */
5045/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5046FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
5047/* Opcode VEX.F3.0F 0xed - invalid */
5048/* Opcode VEX.F2.0F 0xed - invalid */
5049
5050/* Opcode VEX.0F 0xee - invalid */
5051
5052
5053/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5054FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5055{
5056 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5057 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
5058 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5059}
5060
5061
5062/* Opcode VEX.F3.0F 0xee - invalid */
5063/* Opcode VEX.F2.0F 0xee - invalid */
5064
5065
5066/* Opcode VEX.0F 0xef - invalid */
5067
5068
5069/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5070FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5071{
5072 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5073 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5074 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5075}
5076
5077
5078/* Opcode VEX.F3.0F 0xef - invalid */
5079/* Opcode VEX.F2.0F 0xef - invalid */
5080
5081/* Opcode VEX.0F 0xf0 - invalid */
5082/* Opcode VEX.66.0F 0xf0 - invalid */
5083
5084
5085/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5086FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5087{
5088 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5089 Assert(pVCpu->iem.s.uVexLength <= 1);
5090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5091 if (IEM_IS_MODRM_REG_MODE(bRm))
5092 {
5093 /*
5094 * Register, register - (not implemented, assuming it raises \#UD).
5095 */
5096 return IEMOP_RAISE_INVALID_OPCODE();
5097 }
5098 else if (pVCpu->iem.s.uVexLength == 0)
5099 {
5100 /*
5101 * Register, memory128.
5102 */
5103 IEM_MC_BEGIN(0, 2);
5104 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5106
5107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5108 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5109 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5110 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5111
5112 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5113 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5114
5115 IEM_MC_ADVANCE_RIP();
5116 IEM_MC_END();
5117 }
5118 else
5119 {
5120 /*
5121 * Register, memory256.
5122 */
5123 IEM_MC_BEGIN(0, 2);
5124 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5126
5127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5128 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5129 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5130 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5131
5132 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5133 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5134
5135 IEM_MC_ADVANCE_RIP();
5136 IEM_MC_END();
5137 }
5138 return VINF_SUCCESS;
5139}
5140
5141
5142/* Opcode VEX.0F 0xf1 - invalid */
5143/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
5144FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
5145/* Opcode VEX.F2.0F 0xf1 - invalid */
5146
5147/* Opcode VEX.0F 0xf2 - invalid */
5148/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5149FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
5150/* Opcode VEX.F2.0F 0xf2 - invalid */
5151
5152/* Opcode VEX.0F 0xf3 - invalid */
5153/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5154FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
5155/* Opcode VEX.F2.0F 0xf3 - invalid */
5156
5157/* Opcode VEX.0F 0xf4 - invalid */
5158
5159
5160/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
5161FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5162{
5163 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5164 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5165 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5166}
5167
5168
5169/* Opcode VEX.F2.0F 0xf4 - invalid */
5170
5171/* Opcode VEX.0F 0xf5 - invalid */
5172/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5173FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
5174/* Opcode VEX.F2.0F 0xf5 - invalid */
5175
5176/* Opcode VEX.0F 0xf6 - invalid */
5177
5178
5179/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5180FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5181{
5182 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5183 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5184 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5185}
5186
5187
5188/* Opcode VEX.F2.0F 0xf6 - invalid */
5189
5190/* Opcode VEX.0F 0xf7 - invalid */
5191/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5192FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5193/* Opcode VEX.F2.0F 0xf7 - invalid */
5194
5195/* Opcode VEX.0F 0xf8 - invalid */
5196
5197
5198/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5199FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5200{
5201 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5202 IEMOPMEDIAF3_INIT_VARS( vpsubb);
5203 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5204}


/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */


/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */


/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
    IEMOP_MNEMONIC(vud0, "vud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
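
/*
 * Note: Intel documents UD0 as having a ModR/M byte, which is why the handler
 * above decodes it (and any addressing bytes) before raising #UD when the
 * guest is configured as an Intel CPU; for other vendors no further bytes are
 * consumed.
 */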


/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
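
/*
 * Illustrative sketch, not part of the build: the map holds four entries per
 * opcode byte, one per mandatory-prefix column (none, 066h, 0f3h, 0f2h), as
 * the AssertCompile above checks (256 * 4 == 1024).  A decoder would pick a
 * handler roughly like this (the function and index names are made up):
 */
#if 0
static PFNIEMOP PickVexMap1Handler(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no prefix, 1 = 066h, 2 = 0f3h, 3 = 0f2h. */
    return g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif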
/** @} */