VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h

Last change on this file was 104439, checked in by vboxsync on 2024-04-26

VMM/IEM: Implement maskmovq, [v]maskmovdqu instruction decoding, dispatch & emulation, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 223.1 KB
1/* $Id: IEMAllInstVexMap1.cpp.h 104439 2024-04-26 10:30:18Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36#if 0 /*Unused*/
37/**
38 * Common worker for AVX2 instructions on the forms:
39 * - vpxxx xmm0, xmm1, xmm2/mem128
40 * - vpxxx ymm0, ymm1, ymm2/mem256
41 *
42 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
43 */
44FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
45{
46 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
47 if (IEM_IS_MODRM_REG_MODE(bRm))
48 {
49 /*
50 * Register, register.
51 */
52 if (pVCpu->iem.s.uVexLength)
53 {
54 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
55 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
56 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
57 IEM_MC_PREPARE_AVX_USAGE();
58
59 IEM_MC_LOCAL(RTUINT256U, uSrc1);
60 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
61 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
62
63 IEM_MC_LOCAL(RTUINT256U, uSrc2);
64 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
65 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
66
67 IEM_MC_LOCAL(RTUINT256U, uDst);
68 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
69
70 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
71
72 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
73 IEM_MC_ADVANCE_RIP_AND_FINISH();
74 IEM_MC_END();
75 }
76 else
77 {
78 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
79 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
80 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
81 IEM_MC_PREPARE_AVX_USAGE();
82
83 IEM_MC_ARG(PRTUINT128U, puDst, 0);
84 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
85 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
86 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
87 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
88 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
89 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
90 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94 }
95 else
96 {
97 /*
98 * Register, memory.
99 */
100 if (pVCpu->iem.s.uVexLength)
101 {
102 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
105 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
106 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
107 IEM_MC_PREPARE_AVX_USAGE();
108
109 IEM_MC_LOCAL(RTUINT256U, uSrc2);
110 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
111 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
112
113 IEM_MC_LOCAL(RTUINT256U, uSrc1);
114 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
115 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
116
117 IEM_MC_LOCAL(RTUINT256U, uDst);
118 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
119
120 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
121
122 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
123 IEM_MC_ADVANCE_RIP_AND_FINISH();
124 IEM_MC_END();
125 }
126 else
127 {
128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
132 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
133 IEM_MC_PREPARE_AVX_USAGE();
134
135 IEM_MC_LOCAL(RTUINT128U, uSrc2);
136 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
137 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
138
139 IEM_MC_ARG(PRTUINT128U, puDst, 0);
140 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
141 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
142 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
143
144 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
146
147 IEM_MC_ADVANCE_RIP_AND_FINISH();
148 IEM_MC_END();
149 }
150 }
151}
152#endif
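
/*
 * Editor's note (illustrative addition, not part of the original file):
 * the register/memory split used by every worker below keys off the
 * ModR/M 'mod' field.  A minimal standalone predicate equivalent to what
 * IEM_IS_MODRM_REG_MODE tests would be:
 */
#if 0 /* illustrative sketch */
static inline bool isModRmRegisterForm(uint8_t bRm)
{
    return (bRm & 0xc0) == 0xc0; /* mod == 3 selects the register operand form */
}
#endif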
153
154/**
155 * Common worker for AVX2 instructions on the forms:
156 * - vpxxx xmm0, xmm1, xmm2/mem128
157 * - vpxxx ymm0, ymm1, ymm2/mem256
158 *
159 * Takes function table for function w/o implicit state parameter.
160 *
161 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
162 */
163FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
164{
165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
166 if (IEM_IS_MODRM_REG_MODE(bRm))
167 {
168 /*
169 * Register, register.
170 */
171 if (pVCpu->iem.s.uVexLength)
172 {
173 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
174 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
175 IEM_MC_LOCAL(RTUINT256U, uDst);
176 IEM_MC_LOCAL(RTUINT256U, uSrc1);
177 IEM_MC_LOCAL(RTUINT256U, uSrc2);
178 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
181 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
182 IEM_MC_PREPARE_AVX_USAGE();
183 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
184 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
186 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
187 IEM_MC_ADVANCE_RIP_AND_FINISH();
188 IEM_MC_END();
189 }
190 else
191 {
192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_ARG(PRTUINT128U, puDst, 0);
195 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
196 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
198 IEM_MC_PREPARE_AVX_USAGE();
199 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
200 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
201 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
203 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
204 IEM_MC_ADVANCE_RIP_AND_FINISH();
205 IEM_MC_END();
206 }
207 }
208 else
209 {
210 /*
211 * Register, memory.
212 */
213 if (pVCpu->iem.s.uVexLength)
214 {
215 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
216 IEM_MC_LOCAL(RTUINT256U, uDst);
217 IEM_MC_LOCAL(RTUINT256U, uSrc1);
218 IEM_MC_LOCAL(RTUINT256U, uSrc2);
219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
220 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
221 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
222 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
223
224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
225 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
226 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
227 IEM_MC_PREPARE_AVX_USAGE();
228
229 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
230 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
232 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
233
234 IEM_MC_ADVANCE_RIP_AND_FINISH();
235 IEM_MC_END();
236 }
237 else
238 {
239 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
240 IEM_MC_LOCAL(RTUINT128U, uSrc2);
241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
242 IEM_MC_ARG(PRTUINT128U, puDst, 0);
243 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
244 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
245
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
247 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
248 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
249 IEM_MC_PREPARE_AVX_USAGE();
250
251 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
252 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
253 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
255 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
256
257 IEM_MC_ADVANCE_RIP_AND_FINISH();
258 IEM_MC_END();
259 }
260 }
261}
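
/*
 * Editor's note (illustrative addition, not part of the original file):
 * the pfnU128/pfnU256 members called via IEM_MC_CALL_VOID_AIMPL_3 above
 * take three vector pointers and no implicit state.  A portable C
 * fallback of that shape -- using a hypothetical bitwise-AND operation
 * as the example; the real tables are populated elsewhere -- could look
 * like this:
 */
#if 0 /* illustrative sketch */
static void iemAImplExample_pand_u128(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    uint64_t const uLo = puSrc1->au64[0] & puSrc2->au64[0];
    uint64_t const uHi = puSrc1->au64[1] & puSrc2->au64[1];
    puDst->au64[0] = uLo; /* compute both halves first so puDst may alias a source */
    puDst->au64[1] = uHi;
}
#endif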
262
263
264/**
265 * Common worker for AVX2 instructions on the forms:
266 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
267 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
268 *
269 * The 128-bit memory version of this instruction may elect to skip fetching the
270 * lower 64 bits of the operand. We, however, do not.
271 *
272 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
273 */
274FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
275{
276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
277}
278
279
280/**
281 * Common worker for AVX2 instructions on the forms:
282 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
283 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
284 *
285 * The 128-bit memory version of this instruction may elect to skip fetching the
286 * higher 64 bits of the operand. We, however, do not.
287 *
288 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
289 */
290FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
291{
292 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
293}
294
295
296/**
297 * Common worker for AVX2 instructions on the forms:
298 * - vpxxx xmm0, xmm1/mem128
299 * - vpxxx ymm0, ymm1/mem256
300 *
301 * Takes function table for function w/o implicit state parameter.
302 *
303 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
304 */
305FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
306{
307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
308 if (IEM_IS_MODRM_REG_MODE(bRm))
309 {
310 /*
311 * Register, register.
312 */
313 if (pVCpu->iem.s.uVexLength)
314 {
315 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
316 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
317 IEM_MC_LOCAL(RTUINT256U, uDst);
318 IEM_MC_LOCAL(RTUINT256U, uSrc);
319 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
320 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
322 IEM_MC_PREPARE_AVX_USAGE();
323 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
324 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
325 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
326 IEM_MC_ADVANCE_RIP_AND_FINISH();
327 IEM_MC_END();
328 }
329 else
330 {
331 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
332 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
333 IEM_MC_ARG(PRTUINT128U, puDst, 0);
334 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
335 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
336 IEM_MC_PREPARE_AVX_USAGE();
337 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
338 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
339 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
340 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
341 IEM_MC_ADVANCE_RIP_AND_FINISH();
342 IEM_MC_END();
343 }
344 }
345 else
346 {
347 /*
348 * Register, memory.
349 */
350 if (pVCpu->iem.s.uVexLength)
351 {
352 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
353 IEM_MC_LOCAL(RTUINT256U, uDst);
354 IEM_MC_LOCAL(RTUINT256U, uSrc);
355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
356 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
357 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
358
359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
360 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
361 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
362 IEM_MC_PREPARE_AVX_USAGE();
363
364 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
365 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
366 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
367
368 IEM_MC_ADVANCE_RIP_AND_FINISH();
369 IEM_MC_END();
370 }
371 else
372 {
373 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
374 IEM_MC_LOCAL(RTUINT128U, uSrc);
375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
376 IEM_MC_ARG(PRTUINT128U, puDst, 0);
377 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
378
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
381 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
382 IEM_MC_PREPARE_AVX_USAGE();
383
384 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
385 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
386 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
387 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
388
389 IEM_MC_ADVANCE_RIP_AND_FINISH();
390 IEM_MC_END();
391 }
392 }
393}
394
395
396/* Opcode VEX.0F 0x00 - invalid */
397/* Opcode VEX.0F 0x01 - invalid */
398/* Opcode VEX.0F 0x02 - invalid */
399/* Opcode VEX.0F 0x03 - invalid */
400/* Opcode VEX.0F 0x04 - invalid */
401/* Opcode VEX.0F 0x05 - invalid */
402/* Opcode VEX.0F 0x06 - invalid */
403/* Opcode VEX.0F 0x07 - invalid */
404/* Opcode VEX.0F 0x08 - invalid */
405/* Opcode VEX.0F 0x09 - invalid */
406/* Opcode VEX.0F 0x0a - invalid */
407
408/** Opcode VEX.0F 0x0b. */
409FNIEMOP_DEF(iemOp_vud2)
410{
411 IEMOP_MNEMONIC(vud2, "vud2");
412 IEMOP_RAISE_INVALID_OPCODE_RET();
413}
414
415/* Opcode VEX.0F 0x0c - invalid */
416/* Opcode VEX.0F 0x0d - invalid */
417/* Opcode VEX.0F 0x0e - invalid */
418/* Opcode VEX.0F 0x0f - invalid */
419
420
421/**
422 * @opcode 0x10
423 * @oppfx none
424 * @opcpuid avx
425 * @opgroup og_avx_simdfp_datamove
426 * @opxcpttype 4UA
427 * @optest op1=1 op2=2 -> op1=2
428 * @optest op1=0 op2=-22 -> op1=-22
429 */
430FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
431{
432 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
433 Assert(pVCpu->iem.s.uVexLength <= 1);
434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
435 if (IEM_IS_MODRM_REG_MODE(bRm))
436 {
437 /*
438 * Register, register.
439 */
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
442 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
443 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
444 if (pVCpu->iem.s.uVexLength == 0)
445 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
446 IEM_GET_MODRM_RM(pVCpu, bRm));
447 else
448 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
449 IEM_GET_MODRM_RM(pVCpu, bRm));
450 IEM_MC_ADVANCE_RIP_AND_FINISH();
451 IEM_MC_END();
452 }
453 else if (pVCpu->iem.s.uVexLength == 0)
454 {
455 /*
456 * 128-bit: Register, Memory
457 */
458 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
459 IEM_MC_LOCAL(RTUINT128U, uSrc);
460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
461
462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
463 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
464 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
465 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
466
467 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
468 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
469
470 IEM_MC_ADVANCE_RIP_AND_FINISH();
471 IEM_MC_END();
472 }
473 else
474 {
475 /*
476 * 256-bit: Register, Memory
477 */
478 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481
482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
485 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
486
487 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
488 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
489
490 IEM_MC_ADVANCE_RIP_AND_FINISH();
491 IEM_MC_END();
492 }
493}
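
/*
 * Editor's note (illustrative addition, not part of the original file):
 * the uVexLength split above corresponds directly to VEX.L.  With the
 * two-byte VEX prefix, "C5 F8 10 00" decodes here as
 * vmovups xmm0, [rax] (VEX.L=0, 128-bit path) and "C5 FC 10 00" as
 * vmovups ymm0, [rax] (VEX.L=1, 256-bit path), in 64-bit mode.
 */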
494
495
496/**
497 * @opcode 0x10
498 * @oppfx 0x66
499 * @opcpuid avx
500 * @opgroup og_avx_simdfp_datamove
501 * @opxcpttype 4UA
502 * @optest op1=1 op2=2 -> op1=2
503 * @optest op1=0 op2=-22 -> op1=-22
504 */
505FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
506{
507 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
508 Assert(pVCpu->iem.s.uVexLength <= 1);
509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
510 if (IEM_IS_MODRM_REG_MODE(bRm))
511 {
512 /*
513 * Register, register.
514 */
515 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
516 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
517 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
518 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
519 if (pVCpu->iem.s.uVexLength == 0)
520 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
521 IEM_GET_MODRM_RM(pVCpu, bRm));
522 else
523 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
524 IEM_GET_MODRM_RM(pVCpu, bRm));
525 IEM_MC_ADVANCE_RIP_AND_FINISH();
526 IEM_MC_END();
527 }
528 else if (pVCpu->iem.s.uVexLength == 0)
529 {
530 /*
531 * 128-bit: Memory, register.
532 */
533 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
534 IEM_MC_LOCAL(RTUINT128U, uSrc);
535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
536
537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
538 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
539 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
540 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
541
542 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
543 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
544
545 IEM_MC_ADVANCE_RIP_AND_FINISH();
546 IEM_MC_END();
547 }
548 else
549 {
550 /*
551 * 256-bit: Memory, register.
552 */
553 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
554 IEM_MC_LOCAL(RTUINT256U, uSrc);
555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
556
557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
558 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
559 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
560 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
561
562 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
563 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
564
565 IEM_MC_ADVANCE_RIP_AND_FINISH();
566 IEM_MC_END();
567 }
568}
569
570
571FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
572{
573 Assert(pVCpu->iem.s.uVexLength <= 1);
574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
575 if (IEM_IS_MODRM_REG_MODE(bRm))
576 {
577 /**
578 * @opcode 0x10
579 * @oppfx 0xf3
580 * @opcodesub 11 mr/reg
581 * @opcpuid avx
582 * @opgroup og_avx_simdfp_datamerge
583 * @opxcpttype 5
584 * @optest op1=1 op2=0 op3=2 -> op1=2
585 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
586 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
587 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
588 * @note HssHi refers to bits 127:32.
589 */
590 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
591 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
592 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
594 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
595 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
596 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
597 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
598 IEM_MC_ADVANCE_RIP_AND_FINISH();
599 IEM_MC_END();
600 }
601 else
602 {
603 /**
604 * @opdone
605 * @opcode 0x10
606 * @oppfx 0xf3
607 * @opcodesub !11 mr/reg
608 * @opcpuid avx
609 * @opgroup og_avx_simdfp_datamove
610 * @opxcpttype 5
611 * @opfunction iemOp_vmovss_Vss_Hss_Wss
612 * @optest op1=1 op2=2 -> op1=2
613 * @optest op1=0 op2=-22 -> op1=-22
614 */
615 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
617 IEM_MC_LOCAL(uint32_t, uSrc);
618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
619
620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
624
625 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
626 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
627
628 IEM_MC_ADVANCE_RIP_AND_FINISH();
629 IEM_MC_END();
630 }
631}
632
633
634FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
635{
636 Assert(pVCpu->iem.s.uVexLength <= 1);
637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
638 if (IEM_IS_MODRM_REG_MODE(bRm))
639 {
640 /**
641 * @opcode 0x10
642 * @oppfx 0xf2
643 * @opcodesub 11 mr/reg
644 * @opcpuid avx
645 * @opgroup og_avx_simdfp_datamerge
646 * @opxcpttype 5
647 * @optest op1=1 op2=0 op3=2 -> op1=2
648 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
649 * @optest op1=3 op2=-1 op3=0x77 ->
650 * op1=0xffffffffffffffff0000000000000077
651 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
652 */
653 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
654 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
655 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
656
657 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
658 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
659 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
660 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
661 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
662 IEM_MC_ADVANCE_RIP_AND_FINISH();
663 IEM_MC_END();
664 }
665 else
666 {
667 /**
668 * @opdone
669 * @opcode 0x10
670 * @oppfx 0xf2
671 * @opcodesub !11 mr/reg
672 * @opcpuid avx
673 * @opgroup og_avx_simdfp_datamove
674 * @opxcpttype 5
675 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
676 * @optest op1=1 op2=2 -> op1=2
677 * @optest op1=0 op2=-22 -> op1=-22
678 */
679 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
680 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
681 IEM_MC_LOCAL(uint64_t, uSrc);
682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
683
684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
685 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
686 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
687 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
688
689 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
690 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
691
692 IEM_MC_ADVANCE_RIP_AND_FINISH();
693 IEM_MC_END();
694 }
695}
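
/*
 * Editor's note (illustrative addition, not part of the original file):
 * for the register form above, IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX yields
 * dst[63:0] = rm[63:0] and dst[127:64] = vvvv[127:64], zeroing bits
 * 255:128 -- e.g. vvvv[127:64] = 0x42 and rm = 0x77 produce
 * 0x420000000000000077, matching the @optest lines in the doc comment.
 */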
696
697
698/**
699 * @opcode 0x11
700 * @oppfx none
701 * @opcpuid avx
702 * @opgroup og_avx_simdfp_datamove
703 * @opxcpttype 4UA
704 * @optest op1=1 op2=2 -> op1=2
705 * @optest op1=0 op2=-22 -> op1=-22
706 */
707FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
708{
709 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
710 Assert(pVCpu->iem.s.uVexLength <= 1);
711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
712 if (IEM_IS_MODRM_REG_MODE(bRm))
713 {
714 /*
715 * Register, register.
716 */
717 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
718 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
719 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
720 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
721 if (pVCpu->iem.s.uVexLength == 0)
722 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
723 IEM_GET_MODRM_REG(pVCpu, bRm));
724 else
725 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
726 IEM_GET_MODRM_REG(pVCpu, bRm));
727 IEM_MC_ADVANCE_RIP_AND_FINISH();
728 IEM_MC_END();
729 }
730 else if (pVCpu->iem.s.uVexLength == 0)
731 {
732 /*
733 * 128-bit: Memory, register.
734 */
735 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
736 IEM_MC_LOCAL(RTUINT128U, uSrc);
737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
738
739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
740 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
741 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
742 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
743
744 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
745 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
746
747 IEM_MC_ADVANCE_RIP_AND_FINISH();
748 IEM_MC_END();
749 }
750 else
751 {
752 /*
753 * 256-bit: Memory, register.
754 */
755 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
756 IEM_MC_LOCAL(RTUINT256U, uSrc);
757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
758
759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
760 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
761 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
762 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
763
764 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
765 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
766
767 IEM_MC_ADVANCE_RIP_AND_FINISH();
768 IEM_MC_END();
769 }
770}
771
772
773/**
774 * @opcode 0x11
775 * @oppfx 0x66
776 * @opcpuid avx
777 * @opgroup og_avx_simdfp_datamove
778 * @opxcpttype 4UA
779 * @optest op1=1 op2=2 -> op1=2
780 * @optest op1=0 op2=-22 -> op1=-22
781 */
782FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
783{
784 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
785 Assert(pVCpu->iem.s.uVexLength <= 1);
786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
787 if (IEM_IS_MODRM_REG_MODE(bRm))
788 {
789 /*
790 * Register, register.
791 */
792 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
793 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
794 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
795 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
796 if (pVCpu->iem.s.uVexLength == 0)
797 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
798 IEM_GET_MODRM_REG(pVCpu, bRm));
799 else
800 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
801 IEM_GET_MODRM_REG(pVCpu, bRm));
802 IEM_MC_ADVANCE_RIP_AND_FINISH();
803 IEM_MC_END();
804 }
805 else if (pVCpu->iem.s.uVexLength == 0)
806 {
807 /*
808 * 128-bit: Memory, register.
809 */
810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
811 IEM_MC_LOCAL(RTUINT128U, uSrc);
812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
813
814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
815 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
817 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
818
819 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
820 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
821
822 IEM_MC_ADVANCE_RIP_AND_FINISH();
823 IEM_MC_END();
824 }
825 else
826 {
827 /*
828 * 256-bit: Memory, register.
829 */
830 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
831 IEM_MC_LOCAL(RTUINT256U, uSrc);
832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
833
834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
835 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
836 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
837 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
838
839 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
840 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
841
842 IEM_MC_ADVANCE_RIP_AND_FINISH();
843 IEM_MC_END();
844 }
845}
846
847
848FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
849{
850 Assert(pVCpu->iem.s.uVexLength <= 1);
851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
852 if (IEM_IS_MODRM_REG_MODE(bRm))
853 {
854 /**
855 * @opcode 0x11
856 * @oppfx 0xf3
857 * @opcodesub 11 mr/reg
858 * @opcpuid avx
859 * @opgroup og_avx_simdfp_datamerge
860 * @opxcpttype 5
861 * @optest op1=1 op2=0 op3=2 -> op1=2
862 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
863 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
864 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
865 */
866 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
867 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
868 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
869
870 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
871 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
872 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
873 IEM_GET_MODRM_REG(pVCpu, bRm) /*U32*/,
874 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
875 IEM_MC_ADVANCE_RIP_AND_FINISH();
876 IEM_MC_END();
877 }
878 else
879 {
880 /**
881 * @opdone
882 * @opcode 0x11
883 * @oppfx 0xf3
884 * @opcodesub !11 mr/reg
885 * @opcpuid avx
886 * @opgroup og_avx_simdfp_datamove
887 * @opxcpttype 5
888 * @opfunction iemOp_vmovss_Wss_Hss_Vss
889 * @optest op1=1 op2=2 -> op1=2
890 * @optest op1=0 op2=-22 -> op1=-22
891 */
892 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
893 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
894 IEM_MC_LOCAL(uint32_t, uSrc);
895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
896
897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
898 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
899 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
900 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
901
902 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
903 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
904
905 IEM_MC_ADVANCE_RIP_AND_FINISH();
906 IEM_MC_END();
907 }
908}
909
910
911FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
912{
913 Assert(pVCpu->iem.s.uVexLength <= 1);
914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
915 if (IEM_IS_MODRM_REG_MODE(bRm))
916 {
917 /**
918 * @opcode 0x11
919 * @oppfx 0xf2
920 * @opcodesub 11 mr/reg
921 * @opcpuid avx
922 * @opgroup og_avx_simdfp_datamerge
923 * @opxcpttype 5
924 * @optest op1=1 op2=0 op3=2 -> op1=2
925 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
926 * @optest op1=3 op2=-1 op3=0x77 ->
927 * op1=0xffffffffffffffff0000000000000077
928 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
929 */
930 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
931 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
932 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
933
934 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
935 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
936 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
937 IEM_GET_MODRM_REG(pVCpu, bRm),
938 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
939 IEM_MC_ADVANCE_RIP_AND_FINISH();
940 IEM_MC_END();
941 }
942 else
943 {
944 /**
945 * @opdone
946 * @opcode 0x11
947 * @oppfx 0xf2
948 * @opcodesub !11 mr/reg
949 * @opcpuid avx
950 * @opgroup og_avx_simdfp_datamove
951 * @opxcpttype 5
952 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
953 * @optest op1=1 op2=2 -> op1=2
954 * @optest op1=0 op2=-22 -> op1=-22
955 */
956 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
957 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
958 IEM_MC_LOCAL(uint64_t, uSrc);
959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
960
961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
962 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
963 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
964 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
965
966 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
967 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
968
969 IEM_MC_ADVANCE_RIP_AND_FINISH();
970 IEM_MC_END();
971 }
972}
973
974
975FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
976{
977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
978 if (IEM_IS_MODRM_REG_MODE(bRm))
979 {
980 /**
981 * @opcode 0x12
982 * @opcodesub 11 mr/reg
983 * @oppfx none
984 * @opcpuid avx
985 * @opgroup og_avx_simdfp_datamerge
986 * @opxcpttype 7LZ
987 * @optest op2=0x2200220122022203
988 * op3=0x3304330533063307
989 * -> op1=0x22002201220222033304330533063307
990 * @optest op2=-1 op3=-42 -> op1=-42
991 * @note op3 and op2 are only the high 8-byte halves of the XMM registers.
992 */
993 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
995 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
996
997 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
998 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
999 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1000 IEM_GET_MODRM_RM(pVCpu, bRm),
1001 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1002
1003 IEM_MC_ADVANCE_RIP_AND_FINISH();
1004 IEM_MC_END();
1005 }
1006 else
1007 {
1008 /**
1009 * @opdone
1010 * @opcode 0x12
1011 * @opcodesub !11 mr/reg
1012 * @oppfx none
1013 * @opcpuid avx
1014 * @opgroup og_avx_simdfp_datamove
1015 * @opxcpttype 5LZ
1016 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1017 * @optest op1=1 op2=0 op3=0 -> op1=0
1018 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1019 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1020 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1021 */
1022 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1023
1024 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1025 IEM_MC_LOCAL(uint64_t, uSrc);
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1035 uSrc,
1036 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1037
1038 IEM_MC_ADVANCE_RIP_AND_FINISH();
1039 IEM_MC_END();
1040 }
1041}
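
/*
 * Editor's note (illustrative addition, not part of the original file):
 * this opcode carries two mnemonics.  With mod=3 it is vmovhlps:
 * dst[63:0] = rm[127:64], dst[127:64] = vvvv[127:64].  With a memory
 * operand it is vmovlps: dst[63:0] is loaded from memory and
 * dst[127:64] = vvvv[127:64].  Both forms zero bits 255:128 (VLMAX).
 */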
1042
1043
1044/**
1045 * @opcode 0x12
1046 * @opcodesub !11 mr/reg
1047 * @oppfx 0x66
1048 * @opcpuid avx
1049 * @opgroup og_avx_pcksclr_datamerge
1050 * @opxcpttype 5LZ
1051 * @optest op2=0 op3=2 -> op1=2
1052 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1053 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1054 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1055 */
1056FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1057{
1058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1059 if (IEM_IS_MODRM_MEM_MODE(bRm))
1060 {
1061 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1062
1063 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1064 IEM_MC_LOCAL(uint64_t, uSrc);
1065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1066
1067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1068 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1069 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1070 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1071
1072 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1073 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1074 uSrc,
1075 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1076
1077 IEM_MC_ADVANCE_RIP_AND_FINISH();
1078 IEM_MC_END();
1079 }
1080
1081 /**
1082 * @opdone
1083 * @opmnemonic udvex660f12m3
1084 * @opcode 0x12
1085 * @opcodesub 11 mr/reg
1086 * @oppfx 0x66
1087 * @opunused immediate
1088 * @opcpuid avx
1089 * @optest ->
1090 */
1091 else
1092 IEMOP_RAISE_INVALID_OPCODE_RET();
1093}
1094
1095
1096/**
1097 * @opcode 0x12
1098 * @oppfx 0xf3
1099 * @opcpuid avx
1100 * @opgroup og_avx_pcksclr_datamove
1101 * @opxcpttype 4
1102 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1103 * -> op1=0x00000002000000020000000100000001
1104 * @optest vex.l==1 /
1105 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1106 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1107 */
1108FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1109{
1110 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1111 Assert(pVCpu->iem.s.uVexLength <= 1);
1112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1113 if (IEM_IS_MODRM_REG_MODE(bRm))
1114 {
1115 /*
1116 * Register, register.
1117 */
1118 if (pVCpu->iem.s.uVexLength == 0)
1119 {
1120 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1121 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1122 IEM_MC_LOCAL(RTUINT128U, uSrc);
1123
1124 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1125 IEM_MC_PREPARE_AVX_USAGE();
1126
1127 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1128 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1129 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1130 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1131 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1132 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1133
1134 IEM_MC_ADVANCE_RIP_AND_FINISH();
1135 IEM_MC_END();
1136 }
1137 else
1138 {
1139 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1140 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1141 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1142 IEM_MC_PREPARE_AVX_USAGE();
1143
1144 IEM_MC_LOCAL(RTUINT256U, uSrc);
1145 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1146 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1147 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1148 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1149 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1150 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1151 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1152 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1153 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1154 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1155
1156 IEM_MC_ADVANCE_RIP_AND_FINISH();
1157 IEM_MC_END();
1158 }
1159 }
1160 else
1161 {
1162 /*
1163 * Register, memory.
1164 */
1165 if (pVCpu->iem.s.uVexLength == 0)
1166 {
1167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1168 IEM_MC_LOCAL(RTUINT128U, uSrc);
1169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1170
1171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1172 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1173 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1174 IEM_MC_PREPARE_AVX_USAGE();
1175
1176 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1177 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1178 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1179 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1180 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1181 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1182
1183 IEM_MC_ADVANCE_RIP_AND_FINISH();
1184 IEM_MC_END();
1185 }
1186 else
1187 {
1188 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1191 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1192 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1193 IEM_MC_PREPARE_AVX_USAGE();
1194
1195 IEM_MC_LOCAL(RTUINT256U, uSrc);
1196 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1197
1198 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1199 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1200 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1201 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1202 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1203 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1204 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1205 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1206 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1207
1208 IEM_MC_ADVANCE_RIP_AND_FINISH();
1209 IEM_MC_END();
1210 }
1211 }
1212}
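
/*
 * Editor's note (illustrative addition, not part of the original file):
 * the explicit U32 stores above implement the vmovsldup shuffle, which
 * duplicates the even dwords of each lane: source dwords {d3,d2,d1,d0}
 * become {d2,d2,d0,d0}, e.g.
 *   src 0xDDDDDDDD'CCCCCCCC'BBBBBBBB'AAAAAAAA
 *   dst 0xCCCCCCCC'CCCCCCCC'AAAAAAAA'AAAAAAAA
 */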
1213
1214
1215/**
1216 * @opcode 0x12
1217 * @oppfx 0xf2
1218 * @opcpuid avx
1219 * @opgroup og_avx_pcksclr_datamove
1220 * @opxcpttype 5
1221 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1222 * -> op1=0x22222222111111112222222211111111
1223 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1224 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1225 */
1226FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1227{
1228 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1230 if (IEM_IS_MODRM_REG_MODE(bRm))
1231 {
1232 /*
1233 * Register, register.
1234 */
1235 if (pVCpu->iem.s.uVexLength == 0)
1236 {
1237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1238 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1239 IEM_MC_LOCAL(uint64_t, uSrc);
1240
1241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1242 IEM_MC_PREPARE_AVX_USAGE();
1243
1244 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1245 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1247 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1248
1249 IEM_MC_ADVANCE_RIP_AND_FINISH();
1250 IEM_MC_END();
1251 }
1252 else
1253 {
1254 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1255 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1256 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1257 IEM_MC_PREPARE_AVX_USAGE();
1258
1259 IEM_MC_LOCAL(uint64_t, uSrc1);
1260 IEM_MC_LOCAL(uint64_t, uSrc2);
1261 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1262 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1263
1264 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1265 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1266 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1267 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1268 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1269
1270 IEM_MC_ADVANCE_RIP_AND_FINISH();
1271 IEM_MC_END();
1272 }
1273 }
1274 else
1275 {
1276 /*
1277 * Register, memory.
1278 */
1279 if (pVCpu->iem.s.uVexLength == 0)
1280 {
1281 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1283 IEM_MC_LOCAL(uint64_t, uSrc);
1284
1285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1286 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1287 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1288 IEM_MC_PREPARE_AVX_USAGE();
1289
1290 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1291 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1292 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1293 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1294
1295 IEM_MC_ADVANCE_RIP_AND_FINISH();
1296 IEM_MC_END();
1297 }
1298 else
1299 {
1300 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1302
1303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1305 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1306 IEM_MC_PREPARE_AVX_USAGE();
1307
1308 IEM_MC_LOCAL(RTUINT256U, uSrc);
1309 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1310
1311 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1312 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1313 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1314 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1315 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1316
1317 IEM_MC_ADVANCE_RIP_AND_FINISH();
1318 IEM_MC_END();
1319 }
1320 }
1321}
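
/*
 * Editor's note (illustrative addition, not part of the original file):
 * vmovddup duplicates the low qword of each 128-bit lane, hence the
 * {0,0,2,2} source store pattern above for VEX.L=1.  Note the asymmetry
 * the code preserves: the 128-bit memory form reads only 64 bits
 * (IEM_MC_FETCH_MEM_U64) while the 256-bit form reads a full 256 bits.
 */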
1322
1323
1324/**
1325 * @opcode 0x13
1326 * @opcodesub !11 mr/reg
1327 * @oppfx none
1328 * @opcpuid avx
1329 * @opgroup og_avx_simdfp_datamove
1330 * @opxcpttype 5
1331 * @optest op1=1 op2=2 -> op1=2
1332 * @optest op1=0 op2=-42 -> op1=-42
1333 */
1334FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1335{
1336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1337 if (IEM_IS_MODRM_MEM_MODE(bRm))
1338 {
1339 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1340
1341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1342 IEM_MC_LOCAL(uint64_t, uSrc);
1343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1344
1345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1346 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1349
1350 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1351 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1352
1353 IEM_MC_ADVANCE_RIP_AND_FINISH();
1354 IEM_MC_END();
1355 }
1356
1357 /**
1358 * @opdone
1359 * @opmnemonic udvex0f13m3
1360 * @opcode 0x13
1361 * @opcodesub 11 mr/reg
1362 * @oppfx none
1363 * @opunused immediate
1364 * @opcpuid avx
1365 * @optest ->
1366 */
1367 else
1368 IEMOP_RAISE_INVALID_OPCODE_RET();
1369}
1370
1371
1372/**
1373 * @opcode 0x13
1374 * @opcodesub !11 mr/reg
1375 * @oppfx 0x66
1376 * @opcpuid avx
1377 * @opgroup og_avx_pcksclr_datamove
1378 * @opxcpttype 5
1379 * @optest op1=1 op2=2 -> op1=2
1380 * @optest op1=0 op2=-42 -> op1=-42
1381 */
1382FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1383{
1384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1385 if (IEM_IS_MODRM_MEM_MODE(bRm))
1386 {
1387 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1388 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1389 IEM_MC_LOCAL(uint64_t, uSrc);
1390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1391
1392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1393 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1394 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1395 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1396
1397 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1398 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1399
1400 IEM_MC_ADVANCE_RIP_AND_FINISH();
1401 IEM_MC_END();
1402 }
1403
1404 /**
1405 * @opdone
1406 * @opmnemonic udvex660f13m3
1407 * @opcode 0x13
1408 * @opcodesub 11 mr/reg
1409 * @oppfx 0x66
1410 * @opunused immediate
1411 * @opcpuid avx
1412 * @optest ->
1413 */
1414 else
1415 IEMOP_RAISE_INVALID_OPCODE_RET();
1416}
1417
1418/* Opcode VEX.F3.0F 0x13 - invalid */
1419/* Opcode VEX.F2.0F 0x13 - invalid */
1420
1421/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1422FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1423{
1424 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1425 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1426 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1427}
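
/*
 * Editor's note (illustrative addition, not part of the original file):
 * the INIT_VARS + IEM_SELECT_HOST_OR_FALLBACK pattern above resolves, at
 * decode time, to either a host-accelerated worker table or the portable
 * C fallback table; conceptually it behaves like:
 *
 *   PCIEMOPMEDIAOPTF3 pImpl = fHostCpuHasAvx2 ? &s_Host : &s_Fallback;
 *
 * (fHostCpuHasAvx2 here is a stand-in for the real host feature check.)
 */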
1428
1429
1430/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1431FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1432{
1433 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1434 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1435 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1436}
1437
1438
1439/* Opcode VEX.F3.0F 0x14 - invalid */
1440/* Opcode VEX.F2.0F 0x14 - invalid */
1441
1442
1443/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1444FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1445{
1446 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1447 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1448 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1449}
1450
1451
1452/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1453FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1454{
1455 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1456 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1457 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1458}
1459
1460
1461/* Opcode VEX.F3.0F 0x15 - invalid */
1462/* Opcode VEX.F2.0F 0x15 - invalid */
1463
1464
1465FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1466{
1467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1468 if (IEM_IS_MODRM_REG_MODE(bRm))
1469 {
1470 /**
1471 * @opcode 0x16
1472 * @opcodesub 11 mr/reg
1473 * @oppfx none
1474 * @opcpuid avx
1475 * @opgroup og_avx_simdfp_datamerge
1476 * @opxcpttype 7LZ
1477 */
1478 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1479
1480 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1481 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1482
1483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1485 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1486 IEM_GET_MODRM_RM(pVCpu, bRm),
1487 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1488
1489 IEM_MC_ADVANCE_RIP_AND_FINISH();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /**
1495 * @opdone
1496 * @opcode 0x16
1497 * @opcodesub !11 mr/reg
1498 * @oppfx none
1499 * @opcpuid avx
1500 * @opgroup og_avx_simdfp_datamove
1501 * @opxcpttype 5LZ
1502 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1503 */
1504 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1505
1506 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1507 IEM_MC_LOCAL(uint64_t, uSrc);
1508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1509
1510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1511 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1514
1515 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1516 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1517 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1518 uSrc);
1519
1520 IEM_MC_ADVANCE_RIP_AND_FINISH();
1521 IEM_MC_END();
1522 }
1523}
1524
1525
1526/**
1527 * @opcode 0x16
1528 * @opcodesub !11 mr/reg
1529 * @oppfx 0x66
1530 * @opcpuid avx
1531 * @opgroup og_avx_pcksclr_datamerge
1532 * @opxcpttype 5LZ
1533 */
1534FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1535{
1536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1537 if (IEM_IS_MODRM_MEM_MODE(bRm))
1538 {
1539 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1540
1541 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1542 IEM_MC_LOCAL(uint64_t, uSrc);
1543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1544
1545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1546 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1549
1550 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1551 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1552 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1553 uSrc);
1554
1555 IEM_MC_ADVANCE_RIP_AND_FINISH();
1556 IEM_MC_END();
1557 }
1558
1559 /**
1560 * @opdone
1561 * @opmnemonic udvex660f16m3
1562 * @opcode 0x16
1563 * @opcodesub 11 mr/reg
1564 * @oppfx 0x66
1565 * @opunused immediate
1566 * @opcpuid avx
1567 * @optest ->
1568 */
1569 else
1570 IEMOP_RAISE_INVALID_OPCODE_RET();
1571}
1572
1573
1574/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1575/**
1576 * @opcode 0x16
1577 * @oppfx 0xf3
1578 * @opcpuid avx
1579 * @opgroup og_avx_pcksclr_datamove
1580 * @opxcpttype 4
1581 */
1582FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1583{
1584 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1585 Assert(pVCpu->iem.s.uVexLength <= 1);
1586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1587 if (IEM_IS_MODRM_REG_MODE(bRm))
1588 {
1589 /*
1590 * Register, register.
1591 */
1592 if (pVCpu->iem.s.uVexLength == 0)
1593 {
1594 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1595 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1596 IEM_MC_LOCAL(RTUINT128U, uSrc);
1597
1598 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1599 IEM_MC_PREPARE_AVX_USAGE();
1600
1601 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1602 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1606 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1607
1608 IEM_MC_ADVANCE_RIP_AND_FINISH();
1609 IEM_MC_END();
1610 }
1611 else
1612 {
1613 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1614 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1615 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1616 IEM_MC_PREPARE_AVX_USAGE();
1617
1618 IEM_MC_LOCAL(RTUINT256U, uSrc);
1619 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1620 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1621 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1622 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1623 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1624 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1625 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1626 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1627 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1628 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1629
1630 IEM_MC_ADVANCE_RIP_AND_FINISH();
1631 IEM_MC_END();
1632 }
1633 }
1634 else
1635 {
1636 /*
1637 * Register, memory.
1638 */
1639 if (pVCpu->iem.s.uVexLength == 0)
1640 {
1641 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1642 IEM_MC_LOCAL(RTUINT128U, uSrc);
1643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1644
1645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1646 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1647 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1648 IEM_MC_PREPARE_AVX_USAGE();
1649
1650 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1653 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1654 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1655 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1656
1657 IEM_MC_ADVANCE_RIP_AND_FINISH();
1658 IEM_MC_END();
1659 }
1660 else
1661 {
1662 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1665 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1666 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1667 IEM_MC_PREPARE_AVX_USAGE();
1668
1669 IEM_MC_LOCAL(RTUINT256U, uSrc);
1670 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1671
1672 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1673 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1674 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1675 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1676 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1677 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1678 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1679 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1680 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1681
1682 IEM_MC_ADVANCE_RIP_AND_FINISH();
1683 IEM_MC_END();
1684 }
1685 }
1686}
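
/*
 * Editor's note (illustrative addition, not part of the original file):
 * vmovshdup is the odd-dword twin of vmovsldup earlier in this file:
 * source dwords {d3,d2,d1,d0} become {d3,d3,d1,d1} per 128-bit lane,
 * hence the stores above taking source indices 1,1,3,3 (and 5,5,7,7 for
 * the upper lane).
 */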
1687
1688
1689/* Opcode VEX.F2.0F 0x16 - invalid */
1690
1691
1692/**
1693 * @opcode 0x17
1694 * @opcodesub !11 mr/reg
1695 * @oppfx none
1696 * @opcpuid avx
1697 * @opgroup og_avx_simdfp_datamove
1698 * @opxcpttype 5
1699 */
1700FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1701{
1702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1703 if (IEM_IS_MODRM_MEM_MODE(bRm))
1704 {
1705 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1706
1707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1708 IEM_MC_LOCAL(uint64_t, uSrc);
1709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1710
1711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1712 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1715
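/* Only the high quadword (index 1) of the source register is stored. */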
1716 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1717 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1718
1719 IEM_MC_ADVANCE_RIP_AND_FINISH();
1720 IEM_MC_END();
1721 }
1722
1723 /**
1724 * @opdone
1725 * @opmnemonic udvex0f17m3
1726 * @opcode 0x17
1727 * @opcodesub 11 mr/reg
1728 * @oppfx none
1729 * @opunused immediate
1730 * @opcpuid avx
1731 * @optest ->
1732 */
1733 else
1734 IEMOP_RAISE_INVALID_OPCODE_RET();
1735}
1736
1737
1738/**
1739 * @opcode 0x17
1740 * @opcodesub !11 mr/reg
1741 * @oppfx 0x66
1742 * @opcpuid avx
1743 * @opgroup og_avx_pcksclr_datamove
1744 * @opxcpttype 5
1745 */
1746FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1747{
1748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1749 if (IEM_IS_MODRM_MEM_MODE(bRm))
1750 {
1751 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1752
1753 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1754 IEM_MC_LOCAL(uint64_t, uSrc);
1755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1756
1757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1758 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1761
1762 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1763 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1764
1765 IEM_MC_ADVANCE_RIP_AND_FINISH();
1766 IEM_MC_END();
1767 }
1768
1769 /**
1770 * @opdone
1771 * @opmnemonic udvex660f17m3
1772 * @opcode 0x17
1773 * @opcodesub 11 mr/reg
1774 * @oppfx 0x66
1775 * @opunused immediate
1776 * @opcpuid avx
1777 * @optest ->
1778 */
1779 else
1780 IEMOP_RAISE_INVALID_OPCODE_RET();
1781}
1782
1783
1784/* Opcode VEX.F3.0F 0x17 - invalid */
1785/* Opcode VEX.F2.0F 0x17 - invalid */
1786
1787
1788/* Opcode VEX.0F 0x18 - invalid */
1789/* Opcode VEX.0F 0x19 - invalid */
1790/* Opcode VEX.0F 0x1a - invalid */
1791/* Opcode VEX.0F 0x1b - invalid */
1792/* Opcode VEX.0F 0x1c - invalid */
1793/* Opcode VEX.0F 0x1d - invalid */
1794/* Opcode VEX.0F 0x1e - invalid */
1795/* Opcode VEX.0F 0x1f - invalid */
1796
1797/* Opcode VEX.0F 0x20 - invalid */
1798/* Opcode VEX.0F 0x21 - invalid */
1799/* Opcode VEX.0F 0x22 - invalid */
1800/* Opcode VEX.0F 0x23 - invalid */
1801/* Opcode VEX.0F 0x24 - invalid */
1802/* Opcode VEX.0F 0x25 - invalid */
1803/* Opcode VEX.0F 0x26 - invalid */
1804/* Opcode VEX.0F 0x27 - invalid */
1805
1806/**
1807 * @opcode 0x28
1808 * @oppfx none
1809 * @opcpuid avx
1810 * @opgroup og_avx_pcksclr_datamove
1811 * @opxcpttype 1
1812 * @optest op1=1 op2=2 -> op1=2
1813 * @optest op1=0 op2=-42 -> op1=-42
1814 * @note Almost identical to vmovapd.
1815 */
1816FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1817{
1818 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 Assert(pVCpu->iem.s.uVexLength <= 1);
1821 if (IEM_IS_MODRM_REG_MODE(bRm))
1822 {
1823 /*
1824 * Register, register.
1825 */
1826 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1827 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1828
1829 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1830 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1831 if (pVCpu->iem.s.uVexLength == 0)
1832 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1833 IEM_GET_MODRM_RM(pVCpu, bRm));
1834 else
1835 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1836 IEM_GET_MODRM_RM(pVCpu, bRm));
1837 IEM_MC_ADVANCE_RIP_AND_FINISH();
1838 IEM_MC_END();
1839 }
1840 else
1841 {
1842 /*
1843 * Register, memory.
1844 */
1845 if (pVCpu->iem.s.uVexLength == 0)
1846 {
1847 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1849 IEM_MC_LOCAL(RTUINT128U, uSrc);
1850
1851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1852 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1853 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1854 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1855
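/* Note: the aligned fetch raises #GP(0) if the 16-byte operand is misaligned. */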
1856 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1857 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1858
1859 IEM_MC_ADVANCE_RIP_AND_FINISH();
1860 IEM_MC_END();
1861 }
1862 else
1863 {
1864 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866 IEM_MC_LOCAL(RTUINT256U, uSrc);
1867
1868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1869 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1870 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1871 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1872
1873 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1874 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1875
1876 IEM_MC_ADVANCE_RIP_AND_FINISH();
1877 IEM_MC_END();
1878 }
1879 }
1880}
1881
1882
1883/**
1884 * @opcode 0x28
1885 * @oppfx 66
1886 * @opcpuid avx
1887 * @opgroup og_avx_pcksclr_datamove
1888 * @opxcpttype 1
1889 * @optest op1=1 op2=2 -> op1=2
1890 * @optest op1=0 op2=-42 -> op1=-42
1891 * @note Almost identical to vmovaps
1892 */
1893FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1894{
1895 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1897 Assert(pVCpu->iem.s.uVexLength <= 1);
1898 if (IEM_IS_MODRM_REG_MODE(bRm))
1899 {
1900 /*
1901 * Register, register.
1902 */
1903 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1904 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1905
1906 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1907 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1908 if (pVCpu->iem.s.uVexLength == 0)
1909 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1910 IEM_GET_MODRM_RM(pVCpu, bRm));
1911 else
1912 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1913 IEM_GET_MODRM_RM(pVCpu, bRm));
1914 IEM_MC_ADVANCE_RIP_AND_FINISH();
1915 IEM_MC_END();
1916 }
1917 else
1918 {
1919 /*
1920 * Register, memory.
1921 */
1922 if (pVCpu->iem.s.uVexLength == 0)
1923 {
1924 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1926 IEM_MC_LOCAL(RTUINT128U, uSrc);
1927
1928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1929 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1930 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1931 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1932
1933 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1934 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1935
1936 IEM_MC_ADVANCE_RIP_AND_FINISH();
1937 IEM_MC_END();
1938 }
1939 else
1940 {
1941 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1943 IEM_MC_LOCAL(RTUINT256U, uSrc);
1944
1945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1949
1950 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1951 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1952
1953 IEM_MC_ADVANCE_RIP_AND_FINISH();
1954 IEM_MC_END();
1955 }
1956 }
1957}
1958
1959/**
1960 * @opmnemonic udvexf30f28
1961 * @opcode 0x28
1962 * @oppfx 0xf3
1963 * @opunused vex.modrm
1964 * @opcpuid avx
1965 * @optest ->
1966 * @opdone
1967 */
1968
1969/**
1970 * @opmnemonic udvexf20f28
1971 * @opcode 0x28
1972 * @oppfx 0xf2
1973 * @opunused vex.modrm
1974 * @opcpuid avx
1975 * @optest ->
1976 * @opdone
1977 */
1978
1979/**
1980 * @opcode 0x29
1981 * @oppfx none
1982 * @opcpuid avx
1983 * @opgroup og_avx_pcksclr_datamove
1984 * @opxcpttype 1
1985 * @optest op1=1 op2=2 -> op1=2
1986 * @optest op1=0 op2=-42 -> op1=-42
1987 * @note Almost identical to vmovapd.
1988 */
1989FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1990{
1991 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1993 Assert(pVCpu->iem.s.uVexLength <= 1);
1994 if (IEM_IS_MODRM_REG_MODE(bRm))
1995 {
1996 /*
1997 * Register, register.
1998 */
1999 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2000 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2001
2002 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2003 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2004 if (pVCpu->iem.s.uVexLength == 0)
2005 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2006 IEM_GET_MODRM_REG(pVCpu, bRm));
2007 else
2008 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2009 IEM_GET_MODRM_REG(pVCpu, bRm));
2010 IEM_MC_ADVANCE_RIP_AND_FINISH();
2011 IEM_MC_END();
2012 }
2013 else
2014 {
2015 /*
2016 * Register, memory.
2017 */
2018 if (pVCpu->iem.s.uVexLength == 0)
2019 {
2020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2022 IEM_MC_LOCAL(RTUINT128U, uSrc);
2023
2024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2025 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2026 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2027 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2028
2029 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2030 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2031
2032 IEM_MC_ADVANCE_RIP_AND_FINISH();
2033 IEM_MC_END();
2034 }
2035 else
2036 {
2037 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2039 IEM_MC_LOCAL(RTUINT256U, uSrc);
2040
2041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2042 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2043 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2044 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2045
2046 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2047 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2048
2049 IEM_MC_ADVANCE_RIP_AND_FINISH();
2050 IEM_MC_END();
2051 }
2052 }
2053}
2054
2055/**
2056 * @opcode 0x29
2057 * @oppfx 66
2058 * @opcpuid avx
2059 * @opgroup og_avx_pcksclr_datamove
2060 * @opxcpttype 1
2061 * @optest op1=1 op2=2 -> op1=2
2062 * @optest op1=0 op2=-42 -> op1=-42
2063 * @note Almost identical to vmovaps
2064 */
2065FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2066{
2067 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2068 Assert(pVCpu->iem.s.uVexLength <= 1);
2069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2070 if (IEM_IS_MODRM_REG_MODE(bRm))
2071 {
2072 /*
2073 * Register, register.
2074 */
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2077
2078 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2079 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2080 if (pVCpu->iem.s.uVexLength == 0)
2081 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2082 IEM_GET_MODRM_REG(pVCpu, bRm));
2083 else
2084 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2085 IEM_GET_MODRM_REG(pVCpu, bRm));
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089 else
2090 {
2091 /*
2092 * Register, memory.
2093 */
2094 if (pVCpu->iem.s.uVexLength == 0)
2095 {
2096 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT128U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2106 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP_AND_FINISH();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2115 IEM_MC_LOCAL(RTUINT256U, uSrc);
2116
2117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2118 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2119 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2121
2122 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2123 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2124
2125 IEM_MC_ADVANCE_RIP_AND_FINISH();
2126 IEM_MC_END();
2127 }
2128 }
2129}
2130
2131
2132/**
2133 * @opmnemonic udvexf30f29
2134 * @opcode 0x29
2135 * @oppfx 0xf3
2136 * @opunused vex.modrm
2137 * @opcpuid avx
2138 * @optest ->
2139 * @opdone
2140 */
2141
2142/**
2143 * @opmnemonic udvexf20f29
2144 * @opcode 0x29
2145 * @oppfx 0xf2
2146 * @opunused vex.modrm
2147 * @opcpuid avx
2148 * @optest ->
2149 * @opdone
2150 */
2151
2152
2153/** Opcode VEX.0F 0x2a - invalid */
2154/** Opcode VEX.66.0F 0x2a - invalid */
2155/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2156FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2157/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2158FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2159
2160
2161/**
2162 * @opcode 0x2b
2163 * @opcodesub !11 mr/reg
2164 * @oppfx none
2165 * @opcpuid avx
2166 * @opgroup og_avx_cachect
2167 * @opxcpttype 1
2168 * @optest op1=1 op2=2 -> op1=2
2169 * @optest op1=0 op2=-42 -> op1=-42
2170 * @note Identical implementation to vmovntpd
2171 */
2172FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2173{
2174 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
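/* Note: the non-temporal store hint is not modelled here; the stores below are
   ordinary aligned stores. */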
2175 Assert(pVCpu->iem.s.uVexLength <= 1);
2176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2177 if (IEM_IS_MODRM_MEM_MODE(bRm))
2178 {
2179 /*
2180 * Memory, register.
2181 */
2182 if (pVCpu->iem.s.uVexLength == 0)
2183 {
2184 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2185 IEM_MC_LOCAL(RTUINT128U, uSrc);
2186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2187
2188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2189 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2190 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2191 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2192
2193 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2194 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2195
2196 IEM_MC_ADVANCE_RIP_AND_FINISH();
2197 IEM_MC_END();
2198 }
2199 else
2200 {
2201 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2202 IEM_MC_LOCAL(RTUINT256U, uSrc);
2203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2204
2205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2206 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2207 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2208 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2209
2210 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2211 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2212
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 }
2216 }
2217 /* The register, register encoding is invalid. */
2218 else
2219 IEMOP_RAISE_INVALID_OPCODE_RET();
2220}
2221
2222/**
2223 * @opcode 0x2b
2224 * @opcodesub !11 mr/reg
2225 * @oppfx 0x66
2226 * @opcpuid avx
2227 * @opgroup og_avx_cachect
2228 * @opxcpttype 1
2229 * @optest op1=1 op2=2 -> op1=2
2230 * @optest op1=0 op2=-42 -> op1=-42
2231 * @note Identical implementation to vmovntps
2232 */
2233FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2234{
2235 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2236 Assert(pVCpu->iem.s.uVexLength <= 1);
2237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2238 if (IEM_IS_MODRM_MEM_MODE(bRm))
2239 {
2240 /*
2241 * Memory, register.
2242 */
2243 if (pVCpu->iem.s.uVexLength == 0)
2244 {
2245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2246 IEM_MC_LOCAL(RTUINT128U, uSrc);
2247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2248
2249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2250 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2251 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2252 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2253
2254 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2255 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2256
2257 IEM_MC_ADVANCE_RIP_AND_FINISH();
2258 IEM_MC_END();
2259 }
2260 else
2261 {
2262 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2263 IEM_MC_LOCAL(RTUINT256U, uSrc);
2264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2265
2266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2267 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2269 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2270
2271 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2272 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2273
2274 IEM_MC_ADVANCE_RIP_AND_FINISH();
2275 IEM_MC_END();
2276 }
2277 }
2278 /* The register, register encoding is invalid. */
2279 else
2280 IEMOP_RAISE_INVALID_OPCODE_RET();
2281}
2282
2283/**
2284 * @opmnemonic udvexf30f2b
2285 * @opcode 0x2b
2286 * @oppfx 0xf3
2287 * @opunused vex.modrm
2288 * @opcpuid avx
2289 * @optest ->
2290 * @opdone
2291 */
2292
2293/**
2294 * @opmnemonic udvexf20f2b
2295 * @opcode 0x2b
2296 * @oppfx 0xf2
2297 * @opunused vex.modrm
2298 * @opcpuid avx
2299 * @optest ->
2300 * @opdone
2301 */
2302
2303
2304/* Opcode VEX.0F 0x2c - invalid */
2305/* Opcode VEX.66.0F 0x2c - invalid */
2306/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2307FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2308/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2309FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2310
2311/* Opcode VEX.0F 0x2d - invalid */
2312/* Opcode VEX.66.0F 0x2d - invalid */
2313/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2314FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2315/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2316FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2317
2318
2319/**
2320 * @opcode 0x2e
2321 * @oppfx none
2322 * @opflmodify cf,pf,af,zf,sf,of
2323 * @opflclear af,sf,of
2324 */
2325FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2326{
2327 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
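/* EFLAGS reporting per the SDM: unordered -> ZF,PF,CF=1,1,1; greater -> 0,0,0;
   less -> 0,0,1; equal -> 1,0,0. OF/AF/SF are cleared. */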
2328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2329 if (IEM_IS_MODRM_REG_MODE(bRm))
2330 {
2331 /*
2332 * Register, register.
2333 */
2334 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2335 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2336 IEM_MC_LOCAL(uint32_t, fEFlags);
2337 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2338 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2339 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2341 IEM_MC_PREPARE_AVX_USAGE();
2342 IEM_MC_FETCH_EFLAGS(fEFlags);
2343 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2344 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2345 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2346 pEFlags, uSrc1, uSrc2);
2347 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2348 IEM_MC_COMMIT_EFLAGS(fEFlags);
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /*
2356 * Register, memory.
2357 */
2358 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2359 IEM_MC_LOCAL(uint32_t, fEFlags);
2360 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2361 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2362 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2364
2365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2366 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2367 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2368 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2369
2370 IEM_MC_PREPARE_AVX_USAGE();
2371 IEM_MC_FETCH_EFLAGS(fEFlags);
2372 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2373 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2374 pEFlags, uSrc1, uSrc2);
2375 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2376 IEM_MC_COMMIT_EFLAGS(fEFlags);
2377
2378 IEM_MC_ADVANCE_RIP_AND_FINISH();
2379 IEM_MC_END();
2380 }
2381}
2382
2383
2384/**
2385 * @opcode 0x2e
2386 * @oppfx 0x66
2387 * @opflmodify cf,pf,af,zf,sf,of
2388 * @opflclear af,sf,of
2389 */
2390FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2391{
2392 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2394 if (IEM_IS_MODRM_REG_MODE(bRm))
2395 {
2396 /*
2397 * Register, register.
2398 */
2399 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2400 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2401 IEM_MC_LOCAL(uint32_t, fEFlags);
2402 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2403 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2404 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2406 IEM_MC_PREPARE_AVX_USAGE();
2407 IEM_MC_FETCH_EFLAGS(fEFlags);
2408 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2409 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2410 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2411 pEFlags, uSrc1, uSrc2);
2412 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2413 IEM_MC_COMMIT_EFLAGS(fEFlags);
2414
2415 IEM_MC_ADVANCE_RIP_AND_FINISH();
2416 IEM_MC_END();
2417 }
2418 else
2419 {
2420 /*
2421 * Register, memory.
2422 */
2423 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2424 IEM_MC_LOCAL(uint32_t, fEFlags);
2425 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2426 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2427 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2432 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_AVX_USAGE();
2436 IEM_MC_FETCH_EFLAGS(fEFlags);
2437 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2438 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2439 pEFlags, uSrc1, uSrc2);
2440 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2441 IEM_MC_COMMIT_EFLAGS(fEFlags);
2442
2443 IEM_MC_ADVANCE_RIP_AND_FINISH();
2444 IEM_MC_END();
2445 }
2446}
2447
2448
2449/* Opcode VEX.F3.0F 0x2e - invalid */
2450/* Opcode VEX.F2.0F 0x2e - invalid */
2451
2452/**
2453 * @opcode 0x2f
2454 * @oppfx none
2455 * @opflmodify cf,pf,af,zf,sf,of
2456 * @opflclear af,sf,of
2457 */
2458FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2459{
2460 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
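/* Same EFLAGS reporting as vucomiss; the only difference is that QNaN source
   operands also signal the invalid-operation exception (#IA). */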
2461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2462 if (IEM_IS_MODRM_REG_MODE(bRm))
2463 {
2464 /*
2465 * Register, register.
2466 */
2467 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2468 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2469 IEM_MC_LOCAL(uint32_t, fEFlags);
2470 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2471 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2472 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2473 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2474 IEM_MC_PREPARE_AVX_USAGE();
2475 IEM_MC_FETCH_EFLAGS(fEFlags);
2476 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2477 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2478 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2479 pEFlags, uSrc1, uSrc2);
2480 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2481 IEM_MC_COMMIT_EFLAGS(fEFlags);
2482
2483 IEM_MC_ADVANCE_RIP_AND_FINISH();
2484 IEM_MC_END();
2485 }
2486 else
2487 {
2488 /*
2489 * Register, memory.
2490 */
2491 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2492 IEM_MC_LOCAL(uint32_t, fEFlags);
2493 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2494 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2495 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2500 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2501 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2502
2503 IEM_MC_PREPARE_AVX_USAGE();
2504 IEM_MC_FETCH_EFLAGS(fEFlags);
2505 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2506 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2507 pEFlags, uSrc1, uSrc2);
2508 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2509 IEM_MC_COMMIT_EFLAGS(fEFlags);
2510
2511 IEM_MC_ADVANCE_RIP_AND_FINISH();
2512 IEM_MC_END();
2513 }
2514}
2515
2516
2517/**
2518 * @opcode 0x2f
2519 * @oppfx 0x66
2520 * @opflmodify cf,pf,af,zf,sf,of
2521 * @opflclear af,sf,of
2522 */
2523FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2524{
2525 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2527 if (IEM_IS_MODRM_REG_MODE(bRm))
2528 {
2529 /*
2530 * Register, register.
2531 */
2532 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2533 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2534 IEM_MC_LOCAL(uint32_t, fEFlags);
2535 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2536 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2537 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2538 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2539 IEM_MC_PREPARE_AVX_USAGE();
2540 IEM_MC_FETCH_EFLAGS(fEFlags);
2541 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2542 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2543 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2544 pEFlags, uSrc1, uSrc2);
2545 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2546 IEM_MC_COMMIT_EFLAGS(fEFlags);
2547
2548 IEM_MC_ADVANCE_RIP_AND_FINISH();
2549 IEM_MC_END();
2550 }
2551 else
2552 {
2553 /*
2554 * Register, memory.
2555 */
2556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2557 IEM_MC_LOCAL(uint32_t, fEFlags);
2558 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2559 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2560 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2562
2563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2564 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2565 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2566 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2567
2568 IEM_MC_PREPARE_AVX_USAGE();
2569 IEM_MC_FETCH_EFLAGS(fEFlags);
2570 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2571 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2572 pEFlags, uSrc1, uSrc2);
2573 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2574 IEM_MC_COMMIT_EFLAGS(fEFlags);
2575
2576 IEM_MC_ADVANCE_RIP_AND_FINISH();
2577 IEM_MC_END();
2578 }
2579}
2580
2581
2582/* Opcode VEX.F3.0F 0x2f - invalid */
2583/* Opcode VEX.F2.0F 0x2f - invalid */
2584
2585/* Opcode VEX.0F 0x30 - invalid */
2586/* Opcode VEX.0F 0x31 - invalid */
2587/* Opcode VEX.0F 0x32 - invalid */
2588/* Opcode VEX.0F 0x33 - invalid */
2589/* Opcode VEX.0F 0x34 - invalid */
2590/* Opcode VEX.0F 0x35 - invalid */
2591/* Opcode VEX.0F 0x36 - invalid */
2592/* Opcode VEX.0F 0x37 - invalid */
2593/* Opcode VEX.0F 0x38 - invalid */
2594/* Opcode VEX.0F 0x39 - invalid */
2595/* Opcode VEX.0F 0x3a - invalid */
2596/* Opcode VEX.0F 0x3b - invalid */
2597/* Opcode VEX.0F 0x3c - invalid */
2598/* Opcode VEX.0F 0x3d - invalid */
2599/* Opcode VEX.0F 0x3e - invalid */
2600/* Opcode VEX.0F 0x3f - invalid */
2601/* Opcode VEX.0F 0x40 - invalid */
2602/* Opcode VEX.0F 0x41 - invalid */
2603/* Opcode VEX.0F 0x42 - invalid */
2604/* Opcode VEX.0F 0x43 - invalid */
2605/* Opcode VEX.0F 0x44 - invalid */
2606/* Opcode VEX.0F 0x45 - invalid */
2607/* Opcode VEX.0F 0x46 - invalid */
2608/* Opcode VEX.0F 0x47 - invalid */
2609/* Opcode VEX.0F 0x48 - invalid */
2610/* Opcode VEX.0F 0x49 - invalid */
2611/* Opcode VEX.0F 0x4a - invalid */
2612/* Opcode VEX.0F 0x4b - invalid */
2613/* Opcode VEX.0F 0x4c - invalid */
2614/* Opcode VEX.0F 0x4d - invalid */
2615/* Opcode VEX.0F 0x4e - invalid */
2616/* Opcode VEX.0F 0x4f - invalid */
2617
2618
2619/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2620FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2621{
2622 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
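/* Gathers the sign bit of each packed single into the low bits of the
   destination GPR, roughly:
       for (unsigned i = 0; i < cElems; i++)
           uDst |= (uSrc.au32[i] >> 31) << i; */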
2623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2624 if (IEM_IS_MODRM_REG_MODE(bRm))
2625 {
2626 /*
2627 * Register, register.
2628 */
2629 if (pVCpu->iem.s.uVexLength == 0)
2630 {
2631 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2632 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2633 IEM_MC_LOCAL(uint8_t, u8Dst);
2634 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2635 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2636 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2637 IEM_MC_PREPARE_AVX_USAGE();
2638 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2639 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2640 pu8Dst, puSrc);
2641 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2642 IEM_MC_ADVANCE_RIP_AND_FINISH();
2643 IEM_MC_END();
2644 }
2645 else
2646 {
2647 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2649 IEM_MC_LOCAL(uint8_t, u8Dst);
2650 IEM_MC_LOCAL(RTUINT256U, uSrc);
2651 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2652 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2653
2654 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2655 IEM_MC_PREPARE_AVX_USAGE();
2656 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2657 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2658 pu8Dst, puSrc);
2659 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2660 IEM_MC_ADVANCE_RIP_AND_FINISH();
2661 IEM_MC_END();
2662 }
2663 }
2664 /* No memory operand. */
2665 else
2666 IEMOP_RAISE_INVALID_OPCODE_RET();
2667}
2668
2669
2670/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy, Upd */
2671FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2672{
2673 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
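/* As vmovmskps, but one mask bit per packed double, roughly:
       uDst |= (uSrc.au64[i] >> 63) << i; */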
2674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2675 if (IEM_IS_MODRM_REG_MODE(bRm))
2676 {
2677 /*
2678 * Register, register.
2679 */
2680 if (pVCpu->iem.s.uVexLength == 0)
2681 {
2682 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2683 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2684 IEM_MC_LOCAL(uint8_t, u8Dst);
2685 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2686 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2687 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2688 IEM_MC_PREPARE_AVX_USAGE();
2689 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2690 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2691 pu8Dst, puSrc);
2692 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2693 IEM_MC_ADVANCE_RIP_AND_FINISH();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2699 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2700 IEM_MC_LOCAL(uint8_t, u8Dst);
2701 IEM_MC_LOCAL(RTUINT256U, uSrc);
2702 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2703 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2704
2705 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2706 IEM_MC_PREPARE_AVX_USAGE();
2707 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2708 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2709 pu8Dst, puSrc);
2710 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2711 IEM_MC_ADVANCE_RIP_AND_FINISH();
2712 IEM_MC_END();
2713 }
2714 }
2715 /* No memory operand. */
2716 else
2717 IEMOP_RAISE_INVALID_OPCODE_RET();
2718}
2719
2720
2721/* Opcode VEX.F3.0F 0x50 - invalid */
2722/* Opcode VEX.F2.0F 0x50 - invalid */
2723
2724/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2725FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2726/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2727FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2728/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2729FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2730/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2731FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2732
2733/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2734FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2735/* Opcode VEX.66.0F 0x52 - invalid */
2736/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2737FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2738/* Opcode VEX.F2.0F 0x52 - invalid */
2739
2740/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2741FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2742/* Opcode VEX.66.0F 0x53 - invalid */
2743/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2744FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2745/* Opcode VEX.F2.0F 0x53 - invalid */
2746
2747
2748/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2749FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2750{
2751 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
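/* Bitwise AND is type agnostic, so this (like the andn/or/xor variants below)
   reuses the integer vpand worker. */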
2752 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2753 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2754}
2755
2756
2757/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2758FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2759{
2760 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2761 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2762 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2763}
2764
2765
2766/* Opcode VEX.F3.0F 0x54 - invalid */
2767/* Opcode VEX.F2.0F 0x54 - invalid */
2768
2769
2770/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2771FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2772{
2773 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2774 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2775 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2776}
2777
2778
2779/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2780FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2781{
2782 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2783 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2784 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2785}
2786
2787
2788/* Opcode VEX.F3.0F 0x55 - invalid */
2789/* Opcode VEX.F2.0F 0x55 - invalid */
2790
2791/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2792FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2793{
2794 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2795 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2796 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2797}
2798
2799
2800/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2801FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2802{
2803 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2804 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2805 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2806}
2807
2808
2809/* Opcode VEX.F3.0F 0x56 - invalid */
2810/* Opcode VEX.F2.0F 0x56 - invalid */
2811
2812
2813/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2814FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2815{
2816 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2817 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2818 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2819}
2820
2821
2822/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2823FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2824{
2825 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2826 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2827 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2828}
2829
2830
2831/* Opcode VEX.F3.0F 0x57 - invalid */
2832/* Opcode VEX.F2.0F 0x57 - invalid */
2833
2834/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2835FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2836/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2837FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2838/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2839FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2840/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2841FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2842
2843/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2844FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2845/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2846FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2847/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2848FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2849/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2850FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2851
2852/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2853FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2854/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2855FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2856/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2857FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2858/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2859FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2860
2861/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2862FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2863/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2864FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2865/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2866FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2867/* Opcode VEX.F2.0F 0x5b - invalid */
2868
2869/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2870FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2871/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2872FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2873/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2874FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2875/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2876FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2877
2878/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2879FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2880/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2881FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2882/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2883FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2884/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2885FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2886
2887/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2888FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2889/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2890FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2891/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2892FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2893/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2894FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2895
2896/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2897FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2898/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2899FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2900/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2901FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2902/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2903FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2904
2905
2906/* Opcode VEX.0F 0x60 - invalid */
2907
2908
2909/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2910FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2911{
2912 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
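/* Interleaves the bytes of the low halves of each 128-bit lane of the two
   sources. */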
2913 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2914 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2915}
2916
2917
2918/* Opcode VEX.F3.0F 0x60 - invalid */
2919
2920
2921/* Opcode VEX.0F 0x61 - invalid */
2922
2923
2924/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2925FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2926{
2927 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2928 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2929 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2930}
2931
2932
2933/* Opcode VEX.F3.0F 0x61 - invalid */
2934
2935
2936/* Opcode VEX.0F 0x62 - invalid */
2937
2938/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2939FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2940{
2941 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2942 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2943 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2944}
2945
2946
2947/* Opcode VEX.F3.0F 0x62 - invalid */
2948
2949
2950
2951/* Opcode VEX.0F 0x63 - invalid */
2952
2953
2954/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2955FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2956{
2957 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
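/* Packs signed words from both sources into signed bytes using signed
   saturation, per 128-bit lane. */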
2958 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2959 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2960}
2961
2962
2963/* Opcode VEX.F3.0F 0x63 - invalid */
2964
2965/* Opcode VEX.0F 0x64 - invalid */
2966
2967
2968/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2969FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2970{
2971 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2972 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
2973 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2974}
2975
2976
2977/* Opcode VEX.F3.0F 0x64 - invalid */
2978
2979/* Opcode VEX.0F 0x65 - invalid */
2980
2981
2982/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2983FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2984{
2985 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2986 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
2987 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2988}
2989
2990
2991/* Opcode VEX.F3.0F 0x65 - invalid */
2992
2993/* Opcode VEX.0F 0x66 - invalid */
2994
2995
2996/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3000 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3002}
3003
3004
3005/* Opcode VEX.F3.0F 0x66 - invalid */
3006
3007/* Opcode VEX.0F 0x67 - invalid */
3008
3009
3010 /** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3011FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3012{
3013 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3014 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3015 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3016}
3017
3018
3019/* Opcode VEX.F3.0F 0x67 - invalid */
3020
3021
3022///**
3023// * Common worker for SSE2 instructions on the form:
3024// * pxxxx xmm1, xmm2/mem128
3025// *
3026// * The 2nd operand is the second half of a register, which in the memory case
3027// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3028// * where it may read the full 128 bits or only the upper 64 bits.
3029// *
3030// * Exceptions type 4.
3031// */
3032//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3033//{
3034// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3035// if (IEM_IS_MODRM_REG_MODE(bRm))
3036// {
3037// /*
3038// * Register, register.
3039// */
3040// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3041// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3042// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3043// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3044// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3045// IEM_MC_PREPARE_SSE_USAGE();
3046// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3047// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3048// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3049// IEM_MC_ADVANCE_RIP_AND_FINISH();
3050// IEM_MC_END();
3051// }
3052// else
3053// {
3054// /*
3055// * Register, memory.
3056// */
3057// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3058// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3059// IEM_MC_LOCAL(RTUINT128U, uSrc);
3060// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3061// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3062//
3063// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3065// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3066// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3067//
3068// IEM_MC_PREPARE_SSE_USAGE();
3069// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3071//
3072// IEM_MC_ADVANCE_RIP_AND_FINISH();
3073// IEM_MC_END();
3074// }
3075// return VINF_SUCCESS;
3076//}
3077
3078
3079/* Opcode VEX.0F 0x68 - invalid */
3080
3081/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3082FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3083{
3084 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
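/* Interleaves the bytes of the high halves of each 128-bit lane of the two
   sources. */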
3085 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3086 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3087}
3088
3089
3090/* Opcode VEX.F3.0F 0x68 - invalid */
3091
3092
3093/* Opcode VEX.0F 0x69 - invalid */
3094
3095
3096/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3097FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3098{
3099 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3100 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3101 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3102}
3103
3104
3105/* Opcode VEX.F3.0F 0x69 - invalid */
3106
3107
3108/* Opcode VEX.0F 0x6a - invalid */
3109
3110
3111 /** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3112FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3113{
3114 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3115 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3116 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3117}
3118
3119
3120/* Opcode VEX.F3.0F 0x6a - invalid */
3121
3122
3123/* Opcode VEX.0F 0x6b - invalid */
3124
3125
3126/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3127FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3128{
3129 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3130 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3131 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3132}
3133
3134
3135/* Opcode VEX.F3.0F 0x6b - invalid */
3136
3137
3138/* Opcode VEX.0F 0x6c - invalid */
3139
3140
3141/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3142FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3143{
3144 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3145 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3146 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3147}
3148
3149
3150/* Opcode VEX.F3.0F 0x6c - invalid */
3151/* Opcode VEX.F2.0F 0x6c - invalid */
3152
3153
3154/* Opcode VEX.0F 0x6d - invalid */
3155
3156
3157 /** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3158FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3159{
3160 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3161 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3162 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3163}
3164
3165
3166/* Opcode VEX.F3.0F 0x6d - invalid */
3167
3168
3169/* Opcode VEX.0F 0x6e - invalid */
3170
3171FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3172{
3173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3175 {
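/* VEX.W=1: 64-bit general register or memory source (the vmovq form). */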
3176 /**
3177 * @opcode 0x6e
3178 * @opcodesub rex.w=1
3179 * @oppfx 0x66
3180 * @opcpuid avx
3181 * @opgroup og_avx_simdint_datamov
3182 * @opxcpttype 5
3183 * @optest 64-bit / op1=1 op2=2 -> op1=2
3184 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3185 */
3186 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3187 if (IEM_IS_MODRM_REG_MODE(bRm))
3188 {
3189 /* XMM, greg64 */
3190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3191 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3192 IEM_MC_LOCAL(uint64_t, u64Tmp);
3193
3194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3195 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3196
3197 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3198 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3199
3200 IEM_MC_ADVANCE_RIP_AND_FINISH();
3201 IEM_MC_END();
3202 }
3203 else
3204 {
3205 /* XMM, [mem64] */
3206 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3208 IEM_MC_LOCAL(uint64_t, u64Tmp);
3209
3210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3211 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3214
3215 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3216 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3217
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 }
3221 }
3222 else
3223 {
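/* VEX.W=0: 32-bit source, zero-extended into the destination register (the
   vmovd form). */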
3224 /**
3225 * @opdone
3226 * @opcode 0x6e
3227 * @opcodesub rex.w=0
3228 * @oppfx 0x66
3229 * @opcpuid avx
3230 * @opgroup og_avx_simdint_datamov
3231 * @opxcpttype 5
3232 * @opfunction iemOp_vmovd_q_Vy_Ey
3233 * @optest op1=1 op2=2 -> op1=2
3234 * @optest op1=0 op2=-42 -> op1=-42
3235 */
3236 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3237 if (IEM_IS_MODRM_REG_MODE(bRm))
3238 {
3239 /* XMM, greg32 */
3240 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3241 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3242 IEM_MC_LOCAL(uint32_t, u32Tmp);
3243
3244 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3245 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3246
3247 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3248 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3249
3250 IEM_MC_ADVANCE_RIP_AND_FINISH();
3251 IEM_MC_END();
3252 }
3253 else
3254 {
3255 /* XMM, [mem32] */
3256 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_LOCAL(uint32_t, u32Tmp);
3259
3260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3261 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3264
3265 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3266 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3267
3268 IEM_MC_ADVANCE_RIP_AND_FINISH();
3269 IEM_MC_END();
3270 }
3271 }
3272}
3273
3274
3275/* Opcode VEX.F3.0F 0x6e - invalid */
3276
3277
3278/* Opcode VEX.0F 0x6f - invalid */
3279
3280/**
3281 * @opcode 0x6f
3282 * @oppfx 0x66
3283 * @opcpuid avx
3284 * @opgroup og_avx_simdint_datamove
3285 * @opxcpttype 1
3286 * @optest op1=1 op2=2 -> op1=2
3287 * @optest op1=0 op2=-42 -> op1=-42
3288 */
3289FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3290{
3291 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3292 Assert(pVCpu->iem.s.uVexLength <= 1);
3293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3294 if (IEM_IS_MODRM_REG_MODE(bRm))
3295 {
3296 /*
3297 * Register, register.
3298 */
3299 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3300 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3301
3302 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3303 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3304 if (pVCpu->iem.s.uVexLength == 0)
3305 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3306 IEM_GET_MODRM_RM(pVCpu, bRm));
3307 else
3308 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3309 IEM_GET_MODRM_RM(pVCpu, bRm));
3310 IEM_MC_ADVANCE_RIP_AND_FINISH();
3311 IEM_MC_END();
3312 }
3313 else if (pVCpu->iem.s.uVexLength == 0)
3314 {
3315 /*
3316 * Register, memory128.
3317 */
3318 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3319 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3321
3322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3323 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3324 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3325 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3326
3327 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3328 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3329
3330 IEM_MC_ADVANCE_RIP_AND_FINISH();
3331 IEM_MC_END();
3332 }
3333 else
3334 {
3335 /*
3336 * Register, memory256.
3337 */
3338 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3339 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3341
3342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3343 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3344 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3345 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3346
3347 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3348 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3349
3350 IEM_MC_ADVANCE_RIP_AND_FINISH();
3351 IEM_MC_END();
3352 }
3353}
3354
3355/**
3356 * @opcode 0x6f
3357 * @oppfx 0xf3
3358 * @opcpuid avx
3359 * @opgroup og_avx_simdint_datamove
3360 * @opxcpttype 4UA
3361 * @optest op1=1 op2=2 -> op1=2
3362 * @optest op1=0 op2=-42 -> op1=-42
3363 */
3364FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3365{
3366 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
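/* Note: unlike vmovdqa, the memory forms below use the *_NO_AC fetchers, so no
   alignment check is performed. */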
3367 Assert(pVCpu->iem.s.uVexLength <= 1);
3368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3369 if (IEM_IS_MODRM_REG_MODE(bRm))
3370 {
3371 /*
3372 * Register, register.
3373 */
3374 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3375 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3376
3377 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3378 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3379 if (pVCpu->iem.s.uVexLength == 0)
3380 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3381 IEM_GET_MODRM_RM(pVCpu, bRm));
3382 else
3383 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3384 IEM_GET_MODRM_RM(pVCpu, bRm));
3385 IEM_MC_ADVANCE_RIP_AND_FINISH();
3386 IEM_MC_END();
3387 }
3388 else if (pVCpu->iem.s.uVexLength == 0)
3389 {
3390 /*
3391 * Register, memory128.
3392 */
3393 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3394 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3396
3397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3398 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3399 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3400 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3401
3402 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3403 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3404
3405 IEM_MC_ADVANCE_RIP_AND_FINISH();
3406 IEM_MC_END();
3407 }
3408 else
3409 {
3410 /*
3411 * Register, memory256.
3412 */
3413 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3414 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3416
3417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3418 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3419 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3420 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3421
3422 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3423 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3424
3425 IEM_MC_ADVANCE_RIP_AND_FINISH();
3426 IEM_MC_END();
3427 }
3428}
3429
3430
3431/* Opcode VEX.0F 0x70 - invalid */
3432
3433
3434/**
3435 * Common worker for AVX/AVX2 instructions on the forms:
3436 * - vpxxx xmm0, xmm2/mem128, imm8
3437 * - vpxxx ymm0, ymm2/mem256, imm8
3438 *
3439 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3440 */
3441FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3442{
3443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3444 if (IEM_IS_MODRM_REG_MODE(bRm))
3445 {
3446 /*
3447 * Register, register.
3448 */
3449 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3450 if (pVCpu->iem.s.uVexLength)
3451 {
3452 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3453 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3454 IEM_MC_LOCAL(RTUINT256U, uDst);
3455 IEM_MC_LOCAL(RTUINT256U, uSrc);
3456 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3457 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3458 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3459 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3460 IEM_MC_PREPARE_AVX_USAGE();
3461 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3462 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3463 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3464 IEM_MC_ADVANCE_RIP_AND_FINISH();
3465 IEM_MC_END();
3466 }
3467 else
3468 {
3469 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3470 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3471 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3472 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3473 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3474 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3475 IEM_MC_PREPARE_AVX_USAGE();
3476 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3477 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3478 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3479 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3480 IEM_MC_ADVANCE_RIP_AND_FINISH();
3481 IEM_MC_END();
3482 }
3483 }
3484 else
3485 {
3486 /*
3487 * Register, memory.
3488 */
3489 if (pVCpu->iem.s.uVexLength)
3490 {
3491 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3492 IEM_MC_LOCAL(RTUINT256U, uDst);
3493 IEM_MC_LOCAL(RTUINT256U, uSrc);
3494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3495 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3496 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3497
3498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3499 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3500 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3501 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3502 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3503 IEM_MC_PREPARE_AVX_USAGE();
3504
3505 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3506 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3507 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3508
3509 IEM_MC_ADVANCE_RIP_AND_FINISH();
3510 IEM_MC_END();
3511 }
3512 else
3513 {
3514 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3515 IEM_MC_LOCAL(RTUINT128U, uSrc);
3516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3517 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3518 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3519
3520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3521 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3522 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3523 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3524 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3525 IEM_MC_PREPARE_AVX_USAGE();
3526
3527 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3528 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3529 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3530 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3531
3532 IEM_MC_ADVANCE_RIP_AND_FINISH();
3533 IEM_MC_END();
3534 }
3535 }
3536}
3537
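/*
 * All three vpshufXX variants below consume one 2-bit source selector per
 * destination element from the imm8.  Plain-C sketch of the 128-bit dword
 * case, i.e. what a pshufd helper boils down to (illustrative, not the
 * actual iemAImpl_* implementation):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>

static void sketchPShufD(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Src[(bImm >> (i * 2)) & 3]; /* bits 2i+1:2i pick dword i */
}
#endif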
3538
3539/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3540FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3541{
3542 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3543 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3544 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3546}
3547
3548
3549/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3550FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3551{
3552 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3553 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3554 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3556}
3557
3558
3559/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3560FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3561{
3562 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3563 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3564 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3565}
3566
3567
3568/**
3569 * Common worker(s) for AVX/AVX2 instructions on the forms:
3570 * - vpxxx xmm0, xmm2, imm8
3571 * - vpxxx ymm0, ymm2, imm8
3572 *
3573 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3574 */
3575FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3576{
3577 if (IEM_IS_MODRM_REG_MODE(bRm))
3578 {
3579 /*
3580 * Register, register.
3581 */
3582 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3583 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3584 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3585 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3586 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3587 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3589 IEM_MC_PREPARE_AVX_USAGE();
3590 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3591 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3592 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3593 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3594 IEM_MC_ADVANCE_RIP_AND_FINISH();
3595 IEM_MC_END();
3596 }
3597 /* No memory operand. */
3598 else
3599 IEMOP_RAISE_INVALID_OPCODE_RET();
3600}
3601
3602FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3603{
3604 if (IEM_IS_MODRM_REG_MODE(bRm))
3605 {
3606 /*
3607 * Register, register.
3608 */
3609 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3610 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3611 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3612 IEM_MC_LOCAL(RTUINT256U, uDst);
3613 IEM_MC_LOCAL(RTUINT256U, uSrc);
3614 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3615 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3616 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3617 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3618 IEM_MC_PREPARE_AVX_USAGE();
3619 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3620 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3621 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3622 IEM_MC_ADVANCE_RIP_AND_FINISH();
3623 IEM_MC_END();
3624 }
3625 /* No memory operand. */
3626 else
3627 IEMOP_RAISE_INVALID_OPCODE_RET();
3628}
3629
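/*
 * The two workers above feed immediate-shift helpers where each lane is
 * shifted by the imm8 and a count at or above the lane width clears the
 * lane.  Plain-C sketch for the word-sized logical right shift used by
 * vpsrlw (illustrative, not the actual helper):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>

static void sketchPsrlwImm(uint16_t au16Dst[8], uint16_t const au16Src[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        au16Dst[i] = bImm < 16 ? (uint16_t)(au16Src[i] >> bImm) : 0;
}
#endif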
3630
3631/* Opcode VEX.0F 0x71 11/2 - invalid. */
3632/** Opcode VEX.66.0F 0x71 11/2. */
3633FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3634{
3635 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3636 if (pVCpu->iem.s.uVexLength)
3637 {
3638 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3639 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3640 }
3641 else
3642 {
3643 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3644 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3645 }
3646}
3647
3648
3649/* Opcode VEX.0F 0x71 11/4 - invalid */
3650/** Opcode VEX.66.0F 0x71 11/4. */
3651FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3652{
3653 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3654 if (pVCpu->iem.s.uVexLength)
3655 {
3656 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3657 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3658 }
3659 else
3660 {
3661 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3662 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3663 }
3664}
3665
3666/* Opcode VEX.0F 0x71 11/6 - invalid */
3667
3668/** Opcode VEX.66.0F 0x71 11/6. */
3669FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3670{
3671 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3672 if (pVCpu->iem.s.uVexLength)
3673 {
3674 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3675 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3676 }
3677 else
3678 {
3679 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3680 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3681 }
3682}
3683
3684
3685/**
3686 * VEX Group 12 jump table for register variant.
3687 */
3688IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3689{
3690 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3691 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3692 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3693 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3694 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3695 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3696 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3697 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3698};
3699AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3700
3701
3702/** Opcode VEX.0F 0x71. */
3703FNIEMOP_DEF(iemOp_VGrp12)
3704{
3705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3706 if (IEM_IS_MODRM_REG_MODE(bRm))
3707 /* register, register */
3708 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3709 + pVCpu->iem.s.idxPrefix], bRm);
3710 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3711}
3712
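/*
 * The VEX group jump tables are indexed by ModR/M.reg times four plus the
 * SIMD prefix index (0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2), so e.g.
 * VEX.66.0F 0x71 /2 (vpsrlw) resolves to entry 2 * 4 + 1 = 9.  Sketch of
 * the lookup (illustrative names, not IEM APIs):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>

typedef int (*PFNSKETCHOPRM)(uint8_t bRm);

static int sketchDispatchVexGroup(PFNSKETCHOPRM const apfn[8 * 4], uint8_t bRm, uint8_t idxPrefix)
{
    uint8_t const iReg = (bRm >> 3) & 7; /* the ModR/M reg field */
    return apfn[iReg * 4 + idxPrefix](bRm);
}
#endif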
3713
3714/* Opcode VEX.0F 0x72 11/2 - invalid. */
3715/** Opcode VEX.66.0F 0x72 11/2. */
3716FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3717{
3718 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3719 if (pVCpu->iem.s.uVexLength)
3720 {
3721 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3722 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3723 }
3724 else
3725 {
3726 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3727 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3728 }
3729}
3730
3731
3732/* Opcode VEX.0F 0x72 11/4 - invalid. */
3733/** Opcode VEX.66.0F 0x72 11/4. */
3734FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3735{
3736 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3737 if (pVCpu->iem.s.uVexLength)
3738 {
3739 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3740 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3741 }
3742 else
3743 {
3744 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3745 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3746 }
3747}
3748
3749/* Opcode VEX.0F 0x72 11/6 - invalid. */
3750
3751/** Opcode VEX.66.0F 0x72 11/6. */
3752FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3753{
3754 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3755 if (pVCpu->iem.s.uVexLength)
3756 {
3757 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3758 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3759 }
3760 else
3761 {
3762 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3763 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3764 }
3765}
3766
3767
3768/**
3769 * VEX Group 13 jump table for register variant.
3770 */
3771IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3772{
3773 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3774 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3775 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3776 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3777 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3778 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3779 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3780 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3781};
3782AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3783
3784/** Opcode VEX.0F 0x72. */
3785FNIEMOP_DEF(iemOp_VGrp13)
3786{
3787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3788 if (IEM_IS_MODRM_REG_MODE(bRm))
3789 /* register, register */
3790 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3791 + pVCpu->iem.s.idxPrefix], bRm);
3792 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3793}
3794
3795
3796/* Opcode VEX.0F 0x73 11/2 - invalid. */
3797/** Opcode VEX.66.0F 0x73 11/2. */
3798FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3799{
3800 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3801 if (pVCpu->iem.s.uVexLength)
3802 {
3803 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3804 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3805 }
3806 else
3807 {
3808 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3809 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3810 }
3811}
3812
3813
3814/** Opcode VEX.66.0F 0x73 11/3. */
3815FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
3816{
3817 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3818 if (pVCpu->iem.s.uVexLength)
3819 {
3820 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3821 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
3822 }
3823 else
3824 {
3825 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3826 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
3827 }
3828}
3829
3830/* Opcode VEX.0F 0x73 11/6 - invalid. */
3831
3832/** Opcode VEX.66.0F 0x73 11/6. */
3833FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3834{
3835 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3836 if (pVCpu->iem.s.uVexLength)
3837 {
3838 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3839 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3840 }
3841 else
3842 {
3843 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3844 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3845 }
3846}
3847
3848/** Opcode VEX.66.0F 0x73 11/7. */
3849FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
3850{
3851 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3852 if (pVCpu->iem.s.uVexLength)
3853 {
3854 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3855 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
3856 }
3857 else
3858 {
3859 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3860 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
3861 }
3862}
3863
3864/* Opcode VEX.0F 0x73 11/7 - invalid. */
3865
3866/**
3867 * VEX Group 14 jump table for register variant.
3868 */
3869IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3870{
3871 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3872 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3873 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3874 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3875 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3876 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3877 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3878 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3879};
3880AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3881
3882
3883/** Opcode VEX.0F 0x73. */
3884FNIEMOP_DEF(iemOp_VGrp14)
3885{
3886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3887 if (IEM_IS_MODRM_REG_MODE(bRm))
3888 /* register, register */
3889 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3890 + pVCpu->iem.s.idxPrefix], bRm);
3891 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3892}
3893
3894
3895/* Opcode VEX.0F 0x74 - invalid */
3896
3897
3898/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3899FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3900{
3901 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3902 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
3903 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3904}
3905
3906/* Opcode VEX.F3.0F 0x74 - invalid */
3907/* Opcode VEX.F2.0F 0x74 - invalid */
3908
3909
3910/* Opcode VEX.0F 0x75 - invalid */
3911
3912
3913/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3914FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3915{
3916 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3917 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
3918 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3919}
3920
3921
3922/* Opcode VEX.F3.0F 0x75 - invalid */
3923/* Opcode VEX.F2.0F 0x75 - invalid */
3924
3925
3926/* Opcode VEX.0F 0x76 - invalid */
3927
3928
3929/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3930FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3931{
3932 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3933 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
3934 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3935}
3936
3937
3938/* Opcode VEX.F3.0F 0x76 - invalid */
3939/* Opcode VEX.F2.0F 0x76 - invalid */
3940
3941
3942/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
3943FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3944{
3945 Assert(pVCpu->iem.s.uVexLength <= 1);
3946 if (pVCpu->iem.s.uVexLength == 0)
3947 {
3948 /*
3949 * 128-bit: vzeroupper
3950 */
3951 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3952 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3953
3954 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3955 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3956 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3957
3958 IEM_MC_CLEAR_YREG_128_UP(0);
3959 IEM_MC_CLEAR_YREG_128_UP(1);
3960 IEM_MC_CLEAR_YREG_128_UP(2);
3961 IEM_MC_CLEAR_YREG_128_UP(3);
3962 IEM_MC_CLEAR_YREG_128_UP(4);
3963 IEM_MC_CLEAR_YREG_128_UP(5);
3964 IEM_MC_CLEAR_YREG_128_UP(6);
3965 IEM_MC_CLEAR_YREG_128_UP(7);
3966
3967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3968 {
3969 IEM_MC_CLEAR_YREG_128_UP( 8);
3970 IEM_MC_CLEAR_YREG_128_UP( 9);
3971 IEM_MC_CLEAR_YREG_128_UP(10);
3972 IEM_MC_CLEAR_YREG_128_UP(11);
3973 IEM_MC_CLEAR_YREG_128_UP(12);
3974 IEM_MC_CLEAR_YREG_128_UP(13);
3975 IEM_MC_CLEAR_YREG_128_UP(14);
3976 IEM_MC_CLEAR_YREG_128_UP(15);
3977 }
3978
3979 IEM_MC_ADVANCE_RIP_AND_FINISH();
3980 IEM_MC_END();
3981 }
3982 else
3983 {
3984 /*
3985 * 256-bit: vzeroall
3986 */
3987 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3988 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3989
3990 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3991 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3992 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3993
3994 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
3995 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3996 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3997 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3998 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3999 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4000 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4001 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4002 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4003
4004 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4005 {
4006 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4007 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4008 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4009 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4010 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4011 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4012 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4013 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4014 }
4015
4016 IEM_MC_ADVANCE_RIP_AND_FINISH();
4017 IEM_MC_END();
4018 }
4019}
4020
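/*
 * Behaviour recap for the two encodings above: vzeroupper clears bits
 * 255:128 of YMM0 thru YMM7 (thru YMM15 in 64-bit mode) and preserves the
 * low halves, while vzeroall clears the selected registers completely.
 * Plain-C sketch over a flat byte-array register file (illustrative
 * layout, not the IEM guest state):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>
# include <string.h>

static void sketchVZeroUpper(uint8_t abYmm[][32], unsigned cRegs /* 8 or 16 */)
{
    for (unsigned i = 0; i < cRegs; i++)
        memset(&abYmm[i][16], 0, 16); /* clear bits 255:128, keep the XMM part */
}

static void sketchVZeroAll(uint8_t abYmm[][32], unsigned cRegs)
{
    memset(abYmm, 0, cRegs * 32U); /* clear the whole registers */
}
#endif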
4021
4022/* Opcode VEX.66.0F 0x77 - invalid */
4023/* Opcode VEX.F3.0F 0x77 - invalid */
4024/* Opcode VEX.F2.0F 0x77 - invalid */
4025
4026/* Opcode VEX.0F 0x78 - invalid */
4027/* Opcode VEX.66.0F 0x78 - invalid */
4028/* Opcode VEX.F3.0F 0x78 - invalid */
4029/* Opcode VEX.F2.0F 0x78 - invalid */
4030
4031/* Opcode VEX.0F 0x79 - invalid */
4032/* Opcode VEX.66.0F 0x79 - invalid */
4033/* Opcode VEX.F3.0F 0x79 - invalid */
4034/* Opcode VEX.F2.0F 0x79 - invalid */
4035
4036/* Opcode VEX.0F 0x7a - invalid */
4037/* Opcode VEX.66.0F 0x7a - invalid */
4038/* Opcode VEX.F3.0F 0x7a - invalid */
4039/* Opcode VEX.F2.0F 0x7a - invalid */
4040
4041/* Opcode VEX.0F 0x7b - invalid */
4042/* Opcode VEX.66.0F 0x7b - invalid */
4043/* Opcode VEX.F3.0F 0x7b - invalid */
4044/* Opcode VEX.F2.0F 0x7b - invalid */
4045
4046/* Opcode VEX.0F 0x7c - invalid */
4047/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4048FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4049/* Opcode VEX.F3.0F 0x7c - invalid */
4050/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4051FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
4052
4053/* Opcode VEX.0F 0x7d - invalid */
4054/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4055FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4056/* Opcode VEX.F3.0F 0x7d - invalid */
4057/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4058FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4059
4060
4061/* Opcode VEX.0F 0x7e - invalid */
4062
4063FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4064{
4065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4066 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4067 {
4068 /**
4069 * @opcode 0x7e
4070 * @opcodesub rex.w=1
4071 * @oppfx 0x66
4072 * @opcpuid avx
4073 * @opgroup og_avx_simdint_datamove
4074 * @opxcpttype 5
4075 * @optest 64-bit / op1=1 op2=2 -> op1=2
4076 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4077 */
4078 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4079 if (IEM_IS_MODRM_REG_MODE(bRm))
4080 {
4081 /* greg64, XMM */
4082 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4083 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4084 IEM_MC_LOCAL(uint64_t, u64Tmp);
4085
4086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4087 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4088
4089 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4090 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4091
4092 IEM_MC_ADVANCE_RIP_AND_FINISH();
4093 IEM_MC_END();
4094 }
4095 else
4096 {
4097 /* [mem64], XMM */
4098 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4100 IEM_MC_LOCAL(uint64_t, u64Tmp);
4101
4102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4103 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4105 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4106
4107 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4108 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4109
4110 IEM_MC_ADVANCE_RIP_AND_FINISH();
4111 IEM_MC_END();
4112 }
4113 }
4114 else
4115 {
4116 /**
4117 * @opdone
4118 * @opcode 0x7e
4119 * @opcodesub rex.w=0
4120 * @oppfx 0x66
4121 * @opcpuid avx
4122 * @opgroup og_avx_simdint_datamove
4123 * @opxcpttype 5
4124 * @opfunction iemOp_vmovd_q_Ey_Vy
4125 * @optest op1=1 op2=2 -> op1=2
4126 * @optest op1=0 op2=-42 -> op1=-42
4127 */
4128 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4129 if (IEM_IS_MODRM_REG_MODE(bRm))
4130 {
4131 /* greg32, XMM */
4132 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4133 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4134 IEM_MC_LOCAL(uint32_t, u32Tmp);
4135
4136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4137 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4138
4139 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4140 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4141
4142 IEM_MC_ADVANCE_RIP_AND_FINISH();
4143 IEM_MC_END();
4144 }
4145 else
4146 {
4147 /* [mem32], XMM */
4148 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4150 IEM_MC_LOCAL(uint32_t, u32Tmp);
4151
4152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4153 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4154 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4155 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4156
4157 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4158 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4159
4160 IEM_MC_ADVANCE_RIP_AND_FINISH();
4161 IEM_MC_END();
4162 }
4163 }
4164}
4165
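/*
 * As the W check above shows, one opcode covers two stores: with VEX.W=1
 * the low qword goes out (vmovq), with VEX.W=0 the low dword (vmovd), to
 * either a general register or memory.  Plain-C sketch of the memory
 * flavour (illustrative names, not IEM APIs):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>
# include <string.h>

static void sketchVMovdVMovqStore(void *pvDst, uint8_t const abXmm[16], int fVexW)
{
    memcpy(pvDst, abXmm, fVexW ? 8 : 4); /* low qword (W=1) or low dword (W=0) */
}
#endif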
4166
4167/**
4168 * @opcode 0x7e
4169 * @oppfx 0xf3
4170 * @opcpuid avx
4171 * @opgroup og_avx_pcksclr_datamove
4172 * @opxcpttype none
4173 * @optest op1=1 op2=2 -> op1=2
4174 * @optest op1=0 op2=-42 -> op1=-42
4175 */
4176FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4177{
4178 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180 if (IEM_IS_MODRM_REG_MODE(bRm))
4181 {
4182 /*
4183 * Register, register.
4184 */
4185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4186 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4187
4188 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4189 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4190
4191 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4192 IEM_GET_MODRM_RM(pVCpu, bRm));
4193 IEM_MC_ADVANCE_RIP_AND_FINISH();
4194 IEM_MC_END();
4195 }
4196 else
4197 {
4198 /*
4199 * Memory, register.
4200 */
4201 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4202 IEM_MC_LOCAL(uint64_t, uSrc);
4203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4204
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4206 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4207 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4208 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4209
4210 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4211 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4212
4213 IEM_MC_ADVANCE_RIP_AND_FINISH();
4214 IEM_MC_END();
4215 }
4217}
4218/* Opcode VEX.F2.0F 0x7e - invalid */
4219
4220
4221/* Opcode VEX.0F 0x7f - invalid */
4222
4223/**
4224 * @opcode 0x7f
4225 * @oppfx 0x66
4226 * @opcpuid avx
4227 * @opgroup og_avx_simdint_datamove
4228 * @opxcpttype 1
4229 * @optest op1=1 op2=2 -> op1=2
4230 * @optest op1=0 op2=-42 -> op1=-42
4231 */
4232FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4233{
4234 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4235 Assert(pVCpu->iem.s.uVexLength <= 1);
4236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4237 if (IEM_IS_MODRM_REG_MODE(bRm))
4238 {
4239 /*
4240 * Register, register.
4241 */
4242 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4243 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4244
4245 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4246 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4247 if (pVCpu->iem.s.uVexLength == 0)
4248 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4249 IEM_GET_MODRM_REG(pVCpu, bRm));
4250 else
4251 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4252 IEM_GET_MODRM_REG(pVCpu, bRm));
4253 IEM_MC_ADVANCE_RIP_AND_FINISH();
4254 IEM_MC_END();
4255 }
4256 else if (pVCpu->iem.s.uVexLength == 0)
4257 {
4258 /*
4259 * Register, memory128.
4260 */
4261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4262 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4264
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4266 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4269
4270 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4271 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4272
4273 IEM_MC_ADVANCE_RIP_AND_FINISH();
4274 IEM_MC_END();
4275 }
4276 else
4277 {
4278 /*
4279 * Register, memory256.
4280 */
4281 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4282 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4284
4285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4286 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4287 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4288 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4289
4290 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4291 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4292
4293 IEM_MC_ADVANCE_RIP_AND_FINISH();
4294 IEM_MC_END();
4295 }
4296}
4297
4298
4299/**
4300 * @opcode 0x7f
4301 * @oppfx 0xf3
4302 * @opcpuid avx
4303 * @opgroup og_avx_simdint_datamove
4304 * @opxcpttype 4UA
4305 * @optest op1=1 op2=2 -> op1=2
4306 * @optest op1=0 op2=-42 -> op1=-42
4307 */
4308FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4309{
4310 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4311 Assert(pVCpu->iem.s.uVexLength <= 1);
4312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4313 if (IEM_IS_MODRM_REG_MODE(bRm))
4314 {
4315 /*
4316 * Register, register.
4317 */
4318 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4319 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4320
4321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4322 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4323 if (pVCpu->iem.s.uVexLength == 0)
4324 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4325 IEM_GET_MODRM_REG(pVCpu, bRm));
4326 else
4327 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4328 IEM_GET_MODRM_REG(pVCpu, bRm));
4329 IEM_MC_ADVANCE_RIP_AND_FINISH();
4330 IEM_MC_END();
4331 }
4332 else if (pVCpu->iem.s.uVexLength == 0)
4333 {
4334 /*
4335 * Register, memory128.
4336 */
4337 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4338 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4340
4341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4342 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4343 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4344 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4345
4346 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4347 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4348
4349 IEM_MC_ADVANCE_RIP_AND_FINISH();
4350 IEM_MC_END();
4351 }
4352 else
4353 {
4354 /*
4355 * Register, memory256.
4356 */
4357 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4358 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4360
4361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4362 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4365
4366 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4367 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4368
4369 IEM_MC_ADVANCE_RIP_AND_FINISH();
4370 IEM_MC_END();
4371 }
4372}
4373
4374/* Opcode VEX.F2.0F 0x7f - invalid */
4375
4376
4377/* Opcode VEX.0F 0x80 - invalid */
4378/* Opcode VEX.0F 0x81 - invalid */
4379/* Opcode VEX.0F 0x82 - invalid */
4380/* Opcode VEX.0F 0x83 - invalid */
4381/* Opcode VEX.0F 0x84 - invalid */
4382/* Opcode VEX.0F 0x85 - invalid */
4383/* Opcode VEX.0F 0x86 - invalid */
4384/* Opcode VEX.0F 0x87 - invalid */
4385/* Opcode VEX.0F 0x88 - invalid */
4386/* Opcode VEX.0F 0x89 - invalid */
4387/* Opcode VEX.0F 0x8a - invalid */
4388/* Opcode VEX.0F 0x8b - invalid */
4389/* Opcode VEX.0F 0x8c - invalid */
4390/* Opcode VEX.0F 0x8d - invalid */
4391/* Opcode VEX.0F 0x8e - invalid */
4392/* Opcode VEX.0F 0x8f - invalid */
4393/* Opcode VEX.0F 0x90 - invalid */
4394/* Opcode VEX.0F 0x91 - invalid */
4395/* Opcode VEX.0F 0x92 - invalid */
4396/* Opcode VEX.0F 0x93 - invalid */
4397/* Opcode VEX.0F 0x94 - invalid */
4398/* Opcode VEX.0F 0x95 - invalid */
4399/* Opcode VEX.0F 0x96 - invalid */
4400/* Opcode VEX.0F 0x97 - invalid */
4401/* Opcode VEX.0F 0x98 - invalid */
4402/* Opcode VEX.0F 0x99 - invalid */
4403/* Opcode VEX.0F 0x9a - invalid */
4404/* Opcode VEX.0F 0x9b - invalid */
4405/* Opcode VEX.0F 0x9c - invalid */
4406/* Opcode VEX.0F 0x9d - invalid */
4407/* Opcode VEX.0F 0x9e - invalid */
4408/* Opcode VEX.0F 0x9f - invalid */
4409/* Opcode VEX.0F 0xa0 - invalid */
4410/* Opcode VEX.0F 0xa1 - invalid */
4411/* Opcode VEX.0F 0xa2 - invalid */
4412/* Opcode VEX.0F 0xa3 - invalid */
4413/* Opcode VEX.0F 0xa4 - invalid */
4414/* Opcode VEX.0F 0xa5 - invalid */
4415/* Opcode VEX.0F 0xa6 - invalid */
4416/* Opcode VEX.0F 0xa7 - invalid */
4417/* Opcode VEX.0F 0xa8 - invalid */
4418/* Opcode VEX.0F 0xa9 - invalid */
4419/* Opcode VEX.0F 0xaa - invalid */
4420/* Opcode VEX.0F 0xab - invalid */
4421/* Opcode VEX.0F 0xac - invalid */
4422/* Opcode VEX.0F 0xad - invalid */
4423
4424
4425/* Opcode VEX.0F 0xae mem/0 - invalid. */
4426/* Opcode VEX.0F 0xae mem/1 - invalid. */
4427
4428/**
4429 * @ opmaps grp15
4430 * @ opcode !11/2
4431 * @ oppfx none
4432 * @ opcpuid sse
4433 * @ opgroup og_sse_mxcsrsm
4434 * @ opxcpttype 5
4435 * @ optest op1=0 -> mxcsr=0
4436 * @ optest op1=0x2083 -> mxcsr=0x2083
4437 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4438 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4439 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4440 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4441 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4442 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4443 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4444 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4445 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4446 */
4447FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4448//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4449//{
4450// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4451// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4452// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4453// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4454// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4455// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4456// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4457// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4458// IEM_MC_END();
4459// return VINF_SUCCESS;
4460//}
4461
4462
4463/**
4464 * @opmaps vexgrp15
4465 * @opcode !11/3
4466 * @oppfx none
4467 * @opcpuid avx
4468 * @opgroup og_avx_mxcsrsm
4469 * @opxcpttype 5
4470 * @optest mxcsr=0 -> op1=0
4471 * @optest mxcsr=0x2083 -> op1=0x2083
4472 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4473 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4474 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4475 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4476 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4477 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4478 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4479 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4480 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4481 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4482 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4483 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4484 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4485 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4486 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4487 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4488 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4489 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4490 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4491 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4492 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4493 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4494 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4495 * -> value.xcpt=0x6
4496 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4497 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4498 * APMv4 rev 3.17 page 509.
4499 * @todo Test this instruction on AMD Ryzen.
4500 */
4501FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4502{
4503 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4504 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4505 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4507 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4508 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4509 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4510 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4511 IEM_MC_END();
4512}
4513
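/*
 * vstmxcsr just writes the guest's 32-bit MXCSR to the Md operand; the
 * heavy lifting (mode and exception checks) lives in iemCImpl_vstmxcsr.
 * Plain-C sketch of the store itself (illustrative, not that function):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>
# include <string.h>

static void sketchStMxcsr(void *pvDst, uint32_t uMxCsr)
{
    memcpy(pvDst, &uMxCsr, sizeof(uMxCsr)); /* plain 32-bit store */
}
#endif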
4514/* Opcode VEX.0F 0xae mem/4 - invalid. */
4515/* Opcode VEX.0F 0xae mem/5 - invalid. */
4516/* Opcode VEX.0F 0xae mem/6 - invalid. */
4517/* Opcode VEX.0F 0xae mem/7 - invalid. */
4518
4519/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4520/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4521/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4522/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4523/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4524/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4525/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4526/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4527
4528/**
4529 * VEX Group 15 jump table for memory variant.
4530 */
4531IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4532{ /* pfx: none, 066h, 0f3h, 0f2h */
4533 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4534 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4535 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4536 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4537 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4538 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4539 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4540 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4541};
4542AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4543
4544
4545/** Opcode VEX.0F 0xae. */
4546FNIEMOP_DEF(iemOp_VGrp15)
4547{
4548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4549 if (IEM_IS_MODRM_REG_MODE(bRm))
4550 /* register, register */
4551 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4552
4553 /* memory, register */
4554 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4555 + pVCpu->iem.s.idxPrefix], bRm);
4556}
4557
4558
4559/* Opcode VEX.0F 0xaf - invalid. */
4560
4561/* Opcode VEX.0F 0xb0 - invalid. */
4562/* Opcode VEX.0F 0xb1 - invalid. */
4563/* Opcode VEX.0F 0xb2 - invalid. */
4565/* Opcode VEX.0F 0xb3 - invalid. */
4566/* Opcode VEX.0F 0xb4 - invalid. */
4567/* Opcode VEX.0F 0xb5 - invalid. */
4568/* Opcode VEX.0F 0xb6 - invalid. */
4569/* Opcode VEX.0F 0xb7 - invalid. */
4570/* Opcode VEX.0F 0xb8 - invalid. */
4571/* Opcode VEX.0F 0xb9 - invalid. */
4572/* Opcode VEX.0F 0xba - invalid. */
4573/* Opcode VEX.0F 0xbb - invalid. */
4574/* Opcode VEX.0F 0xbc - invalid. */
4575/* Opcode VEX.0F 0xbd - invalid. */
4576/* Opcode VEX.0F 0xbe - invalid. */
4577/* Opcode VEX.0F 0xbf - invalid. */
4578
4579/* Opcode VEX.0F 0xc0 - invalid. */
4580/* Opcode VEX.66.0F 0xc0 - invalid. */
4581/* Opcode VEX.F3.0F 0xc0 - invalid. */
4582/* Opcode VEX.F2.0F 0xc0 - invalid. */
4583
4584/* Opcode VEX.0F 0xc1 - invalid. */
4585/* Opcode VEX.66.0F 0xc1 - invalid. */
4586/* Opcode VEX.F3.0F 0xc1 - invalid. */
4587/* Opcode VEX.F2.0F 0xc1 - invalid. */
4588
4589/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4590FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4591/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4592FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4593/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4594FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4595/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4596FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4597
4598/* Opcode VEX.0F 0xc3 - invalid */
4599/* Opcode VEX.66.0F 0xc3 - invalid */
4600/* Opcode VEX.F3.0F 0xc3 - invalid */
4601/* Opcode VEX.F2.0F 0xc3 - invalid */
4602
4603/* Opcode VEX.0F 0xc4 - invalid */
4604
4605
4606/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4607FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4608{
4609 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4611 if (IEM_IS_MODRM_REG_MODE(bRm))
4612 {
4613 /*
4614 * Register, register.
4615 */
4616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4618 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4619 IEM_MC_LOCAL(uint16_t, uValue);
4620
4621 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4623 IEM_MC_PREPARE_AVX_USAGE();
4624
4625 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4626 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
4627 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4628 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4629 IEM_MC_ADVANCE_RIP_AND_FINISH();
4630 IEM_MC_END();
4631 }
4632 else
4633 {
4634 /*
4635 * Register, memory.
4636 */
4637 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4639 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4640 IEM_MC_LOCAL(uint16_t, uValue);
4641
4642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4643 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4644 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4645 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4646 IEM_MC_PREPARE_AVX_USAGE();
4647
4648 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4649 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4650 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4651 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4652 IEM_MC_ADVANCE_RIP_AND_FINISH();
4653 IEM_MC_END();
4654 }
4655}
4656
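/*
 * vpinsrw above copies the first source register (VVVV) to the destination
 * and then overwrites the 16-bit lane selected by imm8 & 7 with the new
 * word.  Plain-C sketch (illustrative names, not IEM APIs):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>
# include <string.h>

static void sketchVPInsrW(uint16_t au16Dst[8], uint16_t const au16Src1[8],
                          uint16_t uValue, uint8_t bImm)
{
    memcpy(au16Dst, au16Src1, 8 * sizeof(uint16_t));
    au16Dst[bImm & 7] = uValue; /* replace just the selected word */
}
#endif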
4657
4658/* Opcode VEX.F3.0F 0xc4 - invalid */
4659/* Opcode VEX.F2.0F 0xc4 - invalid */
4660
4661/* Opcode VEX.0F 0xc5 - invalid */
4662
4663
4664/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4665FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4666{
4667 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4669 if (IEM_IS_MODRM_REG_MODE(bRm))
4670 {
4671 /*
4672 * greg32, XMM, imm8.
4673 */
4674 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4675 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4676 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4677 IEM_MC_LOCAL(uint16_t, uValue);
4678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4679 IEM_MC_PREPARE_AVX_USAGE();
4680 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4681 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4682 IEM_MC_ADVANCE_RIP_AND_FINISH();
4683 IEM_MC_END();
4684 }
4685 /* No memory operand. */
4686 else
4687 IEMOP_RAISE_INVALID_OPCODE_RET();
4688}
4689
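/*
 * vpextrw below is the inverse operation: it zero-extends the word lane
 * selected by imm8 & 7 into a 32-bit general register.  Sketch
 * (illustrative, not the IEM implementation):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>

static uint32_t sketchVPExtrW(uint16_t const au16Src[8], uint8_t bImm)
{
    return au16Src[bImm & 7]; /* implicitly zero extended to 32 bits */
}
#endif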
4690
4691/* Opcode VEX.F3.0F 0xc5 - invalid */
4692/* Opcode VEX.F2.0F 0xc5 - invalid */
4693
4694
4695#define VSHUFP_X(a_Instr) \
4696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4697 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4698 { \
4699 /* \
4700 * Register, register. \
4701 */ \
4702 if (pVCpu->iem.s.uVexLength) \
4703 { \
4704 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4705 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4706 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4707 IEM_MC_LOCAL(RTUINT256U, uDst); \
4708 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4709 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4710 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4711 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4712 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4713 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4714 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4715 IEM_MC_PREPARE_AVX_USAGE(); \
4716 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4717 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4718 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4719 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4720 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4721 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4722 IEM_MC_END(); \
4723 } \
4724 else \
4725 { \
4726 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4727 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4728 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4729 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4730 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4731 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4732 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4733 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4734 IEM_MC_PREPARE_AVX_USAGE(); \
4735 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4736 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4737 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4738 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4739 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4740 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4741 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4742 IEM_MC_END(); \
4743 } \
4744 } \
4745 else \
4746 { \
4747 /* \
4748 * Register, memory. \
4749 */ \
4750 if (pVCpu->iem.s.uVexLength) \
4751 { \
4752 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4753 IEM_MC_LOCAL(RTUINT256U, uDst); \
4754 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4755 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4757 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4758 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4759 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4762 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4763 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4764 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4765 IEM_MC_PREPARE_AVX_USAGE(); \
4766 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4767 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4768 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4769 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4770 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4772 IEM_MC_END(); \
4773 } \
4774 else \
4775 { \
4776 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4777 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4779 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4780 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4781 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4783 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4784 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4785 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4786 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4787 IEM_MC_PREPARE_AVX_USAGE(); \
4788 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4789 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4790 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4791 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4792 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4793 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4795 IEM_MC_END(); \
4796 } \
4797 } \
4798 (void)0
4799
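/*
 * For the two VSHUFP_X instantiations below: vshufps builds result dwords
 * 0 and 1 from the first source and dwords 2 and 3 from the second, each
 * chosen by two imm8 bits (vshufpd works likewise per qword with one bit
 * each).  Plain-C sketch of the 128-bit vshufps selection (illustrative,
 * not the actual iemAImpl_* helper):
 */
#if 0 /* Illustrative sketch only, not built. */
# include <stdint.h>

static void sketchVShufPs(uint32_t au32Dst[4], uint32_t const au32Src1[4],
                          uint32_t const au32Src2[4], uint8_t bImm)
{
    au32Dst[0] = au32Src1[ bImm       & 3];
    au32Dst[1] = au32Src1[(bImm >> 2) & 3];
    au32Dst[2] = au32Src2[(bImm >> 4) & 3];
    au32Dst[3] = au32Src2[(bImm >> 6) & 3];
}
#endif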
4800/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4801FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4802{
4803 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4804 VSHUFP_X(vshufps);
4805}
4806
4807
4808/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4809FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4810{
4811 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4812 VSHUFP_X(vshufpd);
4813}
4814#undef VSHUFP_X
4815
4816
4817/* Opcode VEX.F3.0F 0xc6 - invalid */
4818/* Opcode VEX.F2.0F 0xc6 - invalid */
4819
4820/* Opcode VEX.0F 0xc7 - invalid */
4821/* Opcode VEX.66.0F 0xc7 - invalid */
4822/* Opcode VEX.F3.0F 0xc7 - invalid */
4823/* Opcode VEX.F2.0F 0xc7 - invalid */
4824
4825/* Opcode VEX.0F 0xc8 - invalid */
4826/* Opcode VEX.0F 0xc9 - invalid */
4827/* Opcode VEX.0F 0xca - invalid */
4828/* Opcode VEX.0F 0xcb - invalid */
4829/* Opcode VEX.0F 0xcc - invalid */
4830/* Opcode VEX.0F 0xcd - invalid */
4831/* Opcode VEX.0F 0xce - invalid */
4832/* Opcode VEX.0F 0xcf - invalid */
4833
4834
4835/* Opcode VEX.0F 0xd0 - invalid */
4836/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4837FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4838/* Opcode VEX.F3.0F 0xd0 - invalid */
4839/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4840FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4841
4842/* Opcode VEX.0F 0xd1 - invalid */
4843/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4844FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4845{
4846 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4847 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4848 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4849}
4850
4851/* Opcode VEX.F3.0F 0xd1 - invalid */
4852/* Opcode VEX.F2.0F 0xd1 - invalid */
4853
4854/* Opcode VEX.0F 0xd2 - invalid */
4855/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4856FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4857{
4858 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4859 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4860 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4861}
4862
4863/* Opcode VEX.F3.0F 0xd2 - invalid */
4864/* Opcode VEX.F2.0F 0xd2 - invalid */
4865
4866/* Opcode VEX.0F 0xd3 - invalid */
4867/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4868FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4869{
4870 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4871 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4872 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4873}
4874
4875/* Opcode VEX.F3.0F 0xd3 - invalid */
4876/* Opcode VEX.F2.0F 0xd3 - invalid */
4877
4878/* Opcode VEX.0F 0xd4 - invalid */
4879
4880
4881/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4882FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4883{
4884 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4885 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
4886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4887}
4888
4889
4890/* Opcode VEX.F3.0F 0xd4 - invalid */
4891/* Opcode VEX.F2.0F 0xd4 - invalid */
4892
4893/* Opcode VEX.0F 0xd5 - invalid */
4894
4895
4896/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4897FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4898{
4899 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4900 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4901 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4902}
4903
4904
4905/* Opcode VEX.F3.0F 0xd5 - invalid */
4906/* Opcode VEX.F2.0F 0xd5 - invalid */
4907
4908/* Opcode VEX.0F 0xd6 - invalid */
4909
4910/**
4911 * @opcode 0xd6
4912 * @oppfx 0x66
4913 * @opcpuid avx
4914 * @opgroup og_avx_pcksclr_datamove
4915 * @opxcpttype none
4916 * @optest op1=-1 op2=2 -> op1=2
4917 * @optest op1=0 op2=-42 -> op1=-42
4918 */
4919FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4920{
4921 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 if (IEM_IS_MODRM_REG_MODE(bRm))
4924 {
4925 /*
4926 * Register, register.
4927 */
4928 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4929 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4930
4931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4932 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4933
4934 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4935 IEM_GET_MODRM_REG(pVCpu, bRm));
4936 IEM_MC_ADVANCE_RIP_AND_FINISH();
4937 IEM_MC_END();
4938 }
4939 else
4940 {
4941 /*
4942 * Memory, register.
4943 */
4944 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4945 IEM_MC_LOCAL(uint64_t, uSrc);
4946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4947
4948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4949 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4950 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4951 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4952
4953 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4954 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4955
4956 IEM_MC_ADVANCE_RIP_AND_FINISH();
4957 IEM_MC_END();
4958 }
4959}
4960
4961/* Opcode VEX.F3.0F 0xd6 - invalid */
4962/* Opcode VEX.F2.0F 0xd6 - invalid */
4963
4964
4965/* Opcode VEX.0F 0xd7 - invalid */
4966
4967/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4968FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4969{
4970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4971 /* The docs say register only. */
4972 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4973 {
4974 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
4975 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4976 if (pVCpu->iem.s.uVexLength)
4977 {
4978 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4979 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4980 IEM_MC_ARG(uint64_t *, puDst, 0);
4981 IEM_MC_LOCAL(RTUINT256U, uSrc);
4982 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4983 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4984 IEM_MC_PREPARE_AVX_USAGE();
4985 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4986 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4987 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4988 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4989 IEM_MC_ADVANCE_RIP_AND_FINISH();
4990 IEM_MC_END();
4991 }
4992 else
4993 {
4994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4995 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4996 IEM_MC_ARG(uint64_t *, puDst, 0);
4997 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4999 IEM_MC_PREPARE_AVX_USAGE();
5000 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5001 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5002 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5003 IEM_MC_ADVANCE_RIP_AND_FINISH();
5004 IEM_MC_END();
5005 }
5006 }
5007 else
5008 IEMOP_RAISE_INVALID_OPCODE_RET();
5009}
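/* Rough operation sketch (128-bit form; the 256-bit form covers 32 bytes):
     fDst = 0;
     for (i = 0; i < 16; i++)
         fDst |= (uint32_t)(puSrc->au8[i] >> 7) << i;
   with the remaining destination GREG bits cleared. */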
5010
5011
5012/* Opcode VEX.F3.0F 0xd7 - invalid */
5013/* Opcode VEX.F2.0F 0xd7 - invalid */
5014
5015
5016/* Opcode VEX.0F 0xd8 - invalid */
5017
5018/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5019FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5020{
5021 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5022 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5023 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5024}
5025
5026
5027/* Opcode VEX.F3.0F 0xd8 - invalid */
5028/* Opcode VEX.F2.0F 0xd8 - invalid */
5029
5030/* Opcode VEX.0F 0xd9 - invalid */
5031
5032
5033/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5034FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5035{
5036 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5037 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5039}
5040
5041
5042/* Opcode VEX.F3.0F 0xd9 - invalid */
5043/* Opcode VEX.F2.0F 0xd9 - invalid */
5044
5045/* Opcode VEX.0F 0xda - invalid */
5046
5047
5048/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5049FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5050{
5051 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5052 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5053 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5054}
5055
5056
5057/* Opcode VEX.F3.0F 0xda - invalid */
5058/* Opcode VEX.F2.0F 0xda - invalid */
5059
5060/* Opcode VEX.0F 0xdb - invalid */
5061
5062
5063/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5064FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5065{
5066 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5067 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5068 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5069}
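/* Note! The bitwise workers (vpand, vpandn, vpor, vpxor) reference the
   pre-instantiated g_iemAImpl_* implementation tables directly instead of
   going through IEMOPMEDIAOPTF3_INIT_VARS like the workers above. */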
5070
5071
5072/* Opcode VEX.F3.0F 0xdb - invalid */
5073/* Opcode VEX.F2.0F 0xdb - invalid */
5074
5075/* Opcode VEX.0F 0xdc - invalid */
5076
5077
5078/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5079FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5080{
5081 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5082 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5083 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5084}
5085
5086
5087/* Opcode VEX.F3.0F 0xdc - invalid */
5088/* Opcode VEX.F2.0F 0xdc - invalid */
5089
5090/* Opcode VEX.0F 0xdd - invalid */
5091
5092
5093/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5094FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5095{
5096 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5097 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5098 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5099}
5100
5101
5102/* Opcode VEX.F3.0F 0xdd - invalid */
5103/* Opcode VEX.F2.0F 0xdd - invalid */
5104
5105/* Opcode VEX.0F 0xde - invalid */
5106
5107
5108/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5109FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5110{
5111 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5112 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5113 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5114}
5115
5116
5117/* Opcode VEX.F3.0F 0xde - invalid */
5118/* Opcode VEX.F2.0F 0xde - invalid */
5119
5120/* Opcode VEX.0F 0xdf - invalid */
5121
5122
5123/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5124FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5125{
5126 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5127 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5128 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5129}
5130
5131
5132/* Opcode VEX.F3.0F 0xdf - invalid */
5133/* Opcode VEX.F2.0F 0xdf - invalid */
5134
5135/* Opcode VEX.0F 0xe0 - invalid */
5136
5137
5138/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5139FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5140{
5141 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5142 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5143 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5144}
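/* Rough operation sketch: au8Dst[i] = (au8Src1[i] + au8Src2[i] + 1) >> 1,
   i.e. a per-byte unsigned average with rounding (vpavgw does the same per
   word). */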
5145
5146
5147/* Opcode VEX.F3.0F 0xe0 - invalid */
5148/* Opcode VEX.F2.0F 0xe0 - invalid */
5149
5150/* Opcode VEX.0F 0xe1 - invalid */
5151/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5152FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5153{
5154 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5155 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5156 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5157}
5158
5159/* Opcode VEX.F3.0F 0xe1 - invalid */
5160/* Opcode VEX.F2.0F 0xe1 - invalid */
5161
5162/* Opcode VEX.0F 0xe2 - invalid */
5163/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5164FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5165{
5166 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5167 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5168 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5169}
5170
5171/* Opcode VEX.F3.0F 0xe2 - invalid */
5172/* Opcode VEX.F2.0F 0xe2 - invalid */
5173
5174/* Opcode VEX.0F 0xe3 - invalid */
5175
5176
5177/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5178FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5179{
5180 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5181 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5182 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5183}
5184
5185
5186/* Opcode VEX.F3.0F 0xe3 - invalid */
5187/* Opcode VEX.F2.0F 0xe3 - invalid */
5188
5189/* Opcode VEX.0F 0xe4 - invalid */
5190
5191
5192/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5193FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5194{
5195 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5196 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5197 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5198}
5199
5200
5201/* Opcode VEX.F3.0F 0xe4 - invalid */
5202/* Opcode VEX.F2.0F 0xe4 - invalid */
5203
5204/* Opcode VEX.0F 0xe5 - invalid */
5205
5206
5207/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5208FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5209{
5210 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5211 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5212 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5213}
5214
5215
5216/* Opcode VEX.F3.0F 0xe5 - invalid */
5217/* Opcode VEX.F2.0F 0xe5 - invalid */
5218
5219/* Opcode VEX.0F 0xe6 - invalid */
5220/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5221FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5222/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5223FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5224/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5225FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5226
5227
5228/* Opcode VEX.0F 0xe7 - invalid */
5229
5230/**
5231 * @opcode 0xe7
5232 * @opcodesub !11 mr/reg
5233 * @oppfx 0x66
5234 * @opcpuid avx
5235 * @opgroup og_avx_cachect
5236 * @opxcpttype 1
5237 * @optest op1=-1 op2=2 -> op1=2
5238 * @optest op1=0 op2=-42 -> op1=-42
5239 */
5240FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5241{
5242 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5243 Assert(pVCpu->iem.s.uVexLength <= 1);
5244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5245 if (IEM_IS_MODRM_MEM_MODE(bRm))
5246 {
5247 if (pVCpu->iem.s.uVexLength == 0)
5248 {
5249 /*
5250 * 128-bit: Memory, register.
5251 */
5252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5253 IEM_MC_LOCAL(RTUINT128U, uSrc);
5254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5255
5256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5257 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5260
5261 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5262 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5263
5264 IEM_MC_ADVANCE_RIP_AND_FINISH();
5265 IEM_MC_END();
5266 }
5267 else
5268 {
5269 /*
5270 * 256-bit: Memory, register.
5271 */
5272 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5273 IEM_MC_LOCAL(RTUINT256U, uSrc);
5274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5275
5276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5277 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5278 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5279 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5280
5281 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5282 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5283
5284 IEM_MC_ADVANCE_RIP_AND_FINISH();
5285 IEM_MC_END();
5286 }
5287 }
5288 /**
5289 * @opdone
5290 * @opmnemonic udvex660fe7reg
5291 * @opcode 0xe7
5292 * @opcodesub 11 mr/reg
5293 * @oppfx 0x66
5294 * @opunused immediate
5295 * @opcpuid avx
5296 * @optest ->
5297 */
5298 else
5299 IEMOP_RAISE_INVALID_OPCODE_RET();
5300}
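/* Note! The *_ALIGN_* stores above enforce the natural 16/32 byte operand
   alignment, raising \#GP(0) on misaligned operands; the non-temporal cache
   hint itself is not modelled by IEM. */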
5301
5302/* Opcode VEX.F3.0F 0xe7 - invalid */
5303/* Opcode VEX.F2.0F 0xe7 - invalid */
5304
5305
5306/* Opcode VEX.0F 0xe8 - invalid */
5307
5308
5309/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5310FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5311{
5312 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5313 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5314 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5315}
5316
5317
5318/* Opcode VEX.F3.0F 0xe8 - invalid */
5319/* Opcode VEX.F2.0F 0xe8 - invalid */
5320
5321/* Opcode VEX.0F 0xe9 - invalid */
5322
5323
5324/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5325FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5326{
5327 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5328 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5329 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5330}
5331
5332
5333/* Opcode VEX.F3.0F 0xe9 - invalid */
5334/* Opcode VEX.F2.0F 0xe9 - invalid */
5335
5336/* Opcode VEX.0F 0xea - invalid */
5337
5338
5339/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5340FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5341{
5342 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5343 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5344 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5345}
5346
5347
5348/* Opcode VEX.F3.0F 0xea - invalid */
5349/* Opcode VEX.F2.0F 0xea - invalid */
5350
5351/* Opcode VEX.0F 0xeb - invalid */
5352
5353
5354/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5355FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5356{
5357 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5358 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5359 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5360}
5361
5362
5363
5364/* Opcode VEX.F3.0F 0xeb - invalid */
5365/* Opcode VEX.F2.0F 0xeb - invalid */
5366
5367/* Opcode VEX.0F 0xec - invalid */
5368
5369
5370/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5371FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5372{
5373 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5374 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5375 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5376}
5377
5378
5379/* Opcode VEX.F3.0F 0xec - invalid */
5380/* Opcode VEX.F2.0F 0xec - invalid */
5381
5382/* Opcode VEX.0F 0xed - invalid */
5383
5384
5385/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5386FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5387{
5388 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5389 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5390 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5391}
5392
5393
5394/* Opcode VEX.F3.0F 0xed - invalid */
5395/* Opcode VEX.F2.0F 0xed - invalid */
5396
5397/* Opcode VEX.0F 0xee - invalid */
5398
5399
5400/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5401FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5402{
5403 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5404 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5405 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5406}
5407
5408
5409/* Opcode VEX.F3.0F 0xee - invalid */
5410/* Opcode VEX.F2.0F 0xee - invalid */
5411
5412
5413/* Opcode VEX.0F 0xef - invalid */
5414
5415
5416/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5417FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5418{
5419 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5420 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5421 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5422}
5423
5424
5425/* Opcode VEX.F3.0F 0xef - invalid */
5426/* Opcode VEX.F2.0F 0xef - invalid */
5427
5428/* Opcode VEX.0F 0xf0 - invalid */
5429/* Opcode VEX.66.0F 0xf0 - invalid */
5430
5431
5432/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5433FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5434{
5435 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5436 Assert(pVCpu->iem.s.uVexLength <= 1);
5437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5438 if (IEM_IS_MODRM_REG_MODE(bRm))
5439 {
5440 /*
5441 * Register, register - (not implemented, assuming it raises \#UD).
5442 */
5443 IEMOP_RAISE_INVALID_OPCODE_RET();
5444 }
5445 else if (pVCpu->iem.s.uVexLength == 0)
5446 {
5447 /*
5448 * Register, memory128.
5449 */
5450 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5451 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5453
5454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5455 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5456 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5457 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5458
5459 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5460 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5461
5462 IEM_MC_ADVANCE_RIP_AND_FINISH();
5463 IEM_MC_END();
5464 }
5465 else
5466 {
5467 /*
5468 * Register, memory256.
5469 */
5470 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5471 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5473
5474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5475 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5476 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5477 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5478
5479 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5480 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5481
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 }
5485}
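/* Note! vlddqu never faults on misalignment, hence the *_NO_AC fetches above;
   apart from the relaxed alignment it behaves like a vmovdqu load here. */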
5486
5487
5488/* Opcode VEX.0F 0xf1 - invalid */
5489/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5490FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5491{
5492 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5493 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5494 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5495}
5496
5497/* Opcode VEX.F2.0F 0xf1 - invalid */
5498
5499/* Opcode VEX.0F 0xf2 - invalid */
5500/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5501FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5502{
5503 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5504 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5505 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5506}
5507/* Opcode VEX.F2.0F 0xf2 - invalid */
5508
5509/* Opcode VEX.0F 0xf3 - invalid */
5510/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5511FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5512{
5513 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5514 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5515 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5516}
5517/* Opcode VEX.F2.0F 0xf3 - invalid */
5518
5519/* Opcode VEX.0F 0xf4 - invalid */
5520
5521
5522/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5523FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5524{
5525 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5526 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5528}
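/* Rough operation sketch: au64Dst[i] = (uint64_t)au32Src1[2*i] * au32Src2[2*i],
   i.e. an unsigned multiply of the even dwords into full qword results. */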
5529
5530
5531/* Opcode VEX.F2.0F 0xf4 - invalid */
5532
5533/* Opcode VEX.0F 0xf5 - invalid */
5534
5535
5536/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5537FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5538{
5539 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5540 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5541 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5542}
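/* Rough operation sketch:
     ai32Dst[i] = (int32_t)ai16Src1[2*i]     * ai16Src2[2*i]
                + (int32_t)ai16Src1[2*i + 1] * ai16Src2[2*i + 1];
   i.e. signed word multiplies with horizontal pairwise addition. */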
5543
5544
5545/* Opcode VEX.F2.0F 0xf5 - invalid */
5546
5547/* Opcode VEX.0F 0xf6 - invalid */
5548
5549
5550/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5551FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5552{
5553 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5554 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5555 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5556}
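/* Rough operation sketch: for each qword lane, sum the absolute differences
   of the eight unsigned byte pairs and zero extend the 16-bit sum into the
   destination qword. */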
5557
5558
5559/* Opcode VEX.F2.0F 0xf6 - invalid */
5560
5561/* Opcode VEX.0F 0xf7 - invalid */
5562
5563
5564/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5565FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
5566{
5567// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
5568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5569 if (IEM_IS_MODRM_REG_MODE(bRm))
5570 {
5571 /*
5572 * XMM, XMM, (implicit) [ ER]DI
5573 */
5574 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5575 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5576 IEM_MC_LOCAL( uint64_t, u64EffAddr);
5577 IEM_MC_LOCAL( RTUINT128U, u128Mem);
5578 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
5579 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
5580 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
5581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5582 IEM_MC_PREPARE_AVX_USAGE();
5583
5584 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
5585 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
5586 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5587 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
5588 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
5589 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
5590
5591 IEM_MC_ADVANCE_RIP_AND_FINISH();
5592 IEM_MC_END();
5593 }
5594 else
5595 {
5596 /* The memory, register encoding is invalid. */
5597 IEMOP_RAISE_INVALID_OPCODE_RET();
5598 }
5599}
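/* Rough operation sketch (byte-granular masked store to ds:[rDI]):
     for (i = 0; i < 16; i++)
         if (puMsk->au8[i] & 0x80)
             pu128Mem->au8[i] = puSrc->au8[i];
   The worker above implements this as a read-modify-write of the whole
   16 byte destination. */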
5600
5601
5602/* Opcode VEX.F2.0F 0xf7 - invalid */
5603
5604/* Opcode VEX.0F 0xf8 - invalid */
5605
5606
5607/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5608FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5609{
5610 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5611 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
5612 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5613}
5614
5615
5616/* Opcode VEX.F2.0F 0xf8 - invalid */
5617
5618/* Opcode VEX.0F 0xf9 - invalid */
5619
5620
5621/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5622FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5623{
5624 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5625 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
5626 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5627}
5628
5629
5630/* Opcode VEX.F2.0F 0xf9 - invalid */
5631
5632/* Opcode VEX.0F 0xfa - invalid */
5633
5634
5635/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5636FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5637{
5638 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5639 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
5640 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5641}
5642
5643
5644/* Opcode VEX.F2.0F 0xfa - invalid */
5645
5646/* Opcode VEX.0F 0xfb - invalid */
5647
5648
5649/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5650FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5651{
5652 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5653 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
5654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5655}
5656
5657
5658/* Opcode VEX.F2.0F 0xfb - invalid */
5659
5660/* Opcode VEX.0F 0xfc - invalid */
5661
5662
5663/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5664FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5665{
5666 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5667 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
5668 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5669}
5670
5671
5672/* Opcode VEX.F2.0F 0xfc - invalid */
5673
5674/* Opcode VEX.0F 0xfd - invalid */
5675
5676
5677/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5678FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5679{
5680 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5681 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
5682 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5683}
5684
5685
5686/* Opcode VEX.F2.0F 0xfd - invalid */
5687
5688/* Opcode VEX.0F 0xfe - invalid */
5689
5690
5691/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5692FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5693{
5694 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5695 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
5696 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5697}
5698
5699
5700/* Opcode VEX.F2.0F 0xfe - invalid */
5701
5702
5703/** Opcode **** 0x0f 0xff - UD0 */
5704FNIEMOP_DEF(iemOp_vud0)
5705{
5706/** @todo testcase: vud0 */
5707 IEMOP_MNEMONIC(vud0, "vud0");
5708 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5709 {
5710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5711 if (IEM_IS_MODRM_MEM_MODE(bRm))
5712 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5713 }
5714 IEMOP_HLP_DONE_DECODING();
5715 IEMOP_RAISE_INVALID_OPCODE_RET();
5716}
5717
5718
5719
5720/**
5721 * VEX opcode map \#1.
5722 *
5723 * @sa g_apfnTwoByteMap
5724 */
5725const PFNIEMOP g_apfnVexMap1[] =
5726{
5727 /* no prefix, 066h prefix f3h prefix, f2h prefix */
5728 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5729 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5730 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5731 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5732 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5733 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5734 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5735 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5736 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5737 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5738 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5739 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5740 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5741 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5742 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5743 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5744
5745 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5746 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5747 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5748 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5749 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5750 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5751 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5752 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5753 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5754 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5755 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5756 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5757 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5758 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5759 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5760 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5761
5762 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5763 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5764 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5765 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5766 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5767 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5768 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5769 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5770 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5771 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5772 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5773 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5774 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5775 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5776 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5777 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5778
5779 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5780 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5781 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5782 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5783 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5784 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5785 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5786 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5787 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5788 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5789 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5790 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5791 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5792 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5793 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5794 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5795
5796 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5797 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5798 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5799 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5800 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5801 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5802 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5803 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5804 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5805 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5806 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5807 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5808 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5809 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5810 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5811 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5812
5813 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5814 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5815 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5816 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5817 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5818 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5819 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5820 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5821 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5822 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5823 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5824 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5825 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5826 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5827 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5828 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5829
5830 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5831 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5832 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5833 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5834 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5835 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5836 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5837 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5838 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5839 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5840 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5841 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5842 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5843 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5844 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5845 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5846
5847 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5848 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5849 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5850 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5851 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5852 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5853 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5854 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5855 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5856 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5857 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5858 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5859 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5860 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5861 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5862 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5863
5864 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5865 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5866 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5867 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5868 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5869 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5870 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5871 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5875 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5876 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5877 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5878 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5879 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5880
5881 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5882 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5883 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5884 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5885 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5887 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5888 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5889 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5890 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5891 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5893 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5894 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5895 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5896 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5897
5898 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5899 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5900 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5901 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5902 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5903 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5904 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5905 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5906 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5907 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5908 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5909 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5910 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5911 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5912 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5913 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5914
5915 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5916 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5917 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5918 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5919 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5920 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5921 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5922 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5923 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5924 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5925 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5926 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5927 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5928 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5929 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5930 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5931
5932 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5933 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5934 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5935 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5936 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5937 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5938 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5939 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5940 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5941 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5942 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5943 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5944 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5945 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5946 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5947 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5948
5949 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5950 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5951 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5952 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5953 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5954 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5955 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5956 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5957 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5958 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5959 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5960 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5961 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5962 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5963 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5964 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5965
5966 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5967 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5968 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5969 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5970 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5971 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5972 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5973 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5974 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5975 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5976 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5977 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5978 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5979 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5980 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5981 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5982
5983 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5984 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5985 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5986 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5987 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5988 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5989 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5990 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5991 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5992 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5993 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5994 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5995 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5996 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5997 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5998 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5999};
6000AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
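/* 256 opcode bytes times 4 prefix columns (none, 0x66, 0xF3, 0xF2) = 1024 entries. */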
6001/** @} */
6002