1/* $Id: IEMAllInstructionsVexMap2.cpp.h 96407 2022-08-22 17:43:14Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsThree0f38.cpp.h is a legacy (non-VEX) mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 2
33 * @{
34 */
35
36/* Opcode VEX.0F38 0x00 - invalid. */
37
38
39/** Opcode VEX.66.0F38 0x00. */
40FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
41{
42 IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
43 IEMOPMEDIAF3_INIT_VARS(vpshufb);
44 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
45}
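
/* Informal reference semantics for vpshufb (a sketch, not the emulation itself):
 * for each byte i within a 128-bit lane,
 *   dst[i] = (sel[i] & 0x80) ? 0 : src[sel[i] & 0x0f],
 * where src is the Hx operand and sel the Wx operand; the VEX.256 form repeats
 * this independently for each lane. */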
46
47
48/* Opcode VEX.0F38 0x01 - invalid. */
49
50
51/** Opcode VEX.66.0F38 0x01. */
52FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
53{
54 IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
55 IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
56 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
57}
58
59
60/* Opcode VEX.0F38 0x02 - invalid. */
61
62
63/** Opcode VEX.66.0F38 0x02. */
64FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
65{
66 IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
67 IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
68 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
69}
70
71
72/* Opcode VEX.0F38 0x03 - invalid. */
73
74
75/** Opcode VEX.66.0F38 0x03. */
76FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
77{
78 IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
79 IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
80 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
81}
82
83
84/* Opcode VEX.0F38 0x04 - invalid. */
85
86
87/** Opcode VEX.66.0F38 0x04. */
88FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
89{
90 IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
91 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
92 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
93}
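
/* Informal reference semantics for vpmaddubsw (sketch): each destination word is
 * the signed-saturated sum of two adjacent byte products, the Hx bytes taken as
 * unsigned and the Wx bytes as signed:
 *   dst.w[i] = SatS16(u8(a[2*i])*s8(b[2*i]) + u8(a[2*i+1])*s8(b[2*i+1])). */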
94
95
96/* Opcode VEX.0F38 0x05 - invalid. */
97
98
99/** Opcode VEX.66.0F38 0x05. */
100FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
101{
102 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
103 IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
104 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
105}
106
107
108/* Opcode VEX.0F38 0x06 - invalid. */
109
110
111/** Opcode VEX.66.0F38 0x06. */
112FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
113{
114 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
115 IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
116 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
117}
118
119
120/* Opcode VEX.0F38 0x07 - invalid. */
121
122
123/** Opcode VEX.66.0F38 0x07. */
124FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
125{
126 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
127 IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
129}
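
/* The vphaddw/d/sw and vphsubw/d/sw handlers above (0x01..0x03, 0x05..0x07) are
 * the horizontal add/subtract family. Informally, each 128-bit lane of the
 * result holds the pairwise sums (or differences) of adjacent elements, the Hx
 * pairs packed into the low half and the Wx pairs into the high half of the
 * lane; the "sw" variants saturate the signed word results. */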
130
131
132/* Opcode VEX.0F38 0x08 - invalid. */
133
134
135/** Opcode VEX.66.0F38 0x08. */
136FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
137{
138 IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
139 IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
140 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
141}
142
143
144/* Opcode VEX.0F38 0x09 - invalid. */
145
146
147/** Opcode VEX.66.0F38 0x09. */
148FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
149{
150 IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
151 IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
153}
154
155
156/* Opcode VEX.0F38 0x0a - invalid. */
157
158
159/** Opcode VEX.66.0F38 0x0a. */
160FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
161{
162 IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
163 IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
164 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
165}
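
/* Informal reference semantics for the vpsignb/w/d family above (sketch), per
 * element with a = Hx and b = Wx:
 *   dst[i] = (b[i] < 0) ? -a[i] : (b[i] == 0) ? 0 : a[i]; */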
166
167
168/* Opcode VEX.0F38 0x0b - invalid. */
169
170
171/** Opcode VEX.66.0F38 0x0b. */
172FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
173{
174 IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
175 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
176 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
177}
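
/* Informal reference semantics for vpmulhrsw (sketch): a rounded high-half
 * multiply of signed words, dst[i] = (int16_t)((((a[i] * b[i]) >> 14) + 1) >> 1),
 * i.e. (a[i]*b[i] + 0x4000) >> 15. */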
178
179
180/* Opcode VEX.0F38 0x0c - invalid. */
181/** Opcode VEX.66.0F38 0x0c. */
182FNIEMOP_STUB(iemOp_vpermilps_Vx_Hx_Wx);
183/* Opcode VEX.0F38 0x0d - invalid. */
184/** Opcode VEX.66.0F38 0x0d. */
185FNIEMOP_STUB(iemOp_vpermilpd_Vx_Hx_Wx);
186/* Opcode VEX.0F38 0x0e - invalid. */
187/** Opcode VEX.66.0F38 0x0e. */
188FNIEMOP_STUB(iemOp_vtestps_Vx_Wx);
189/* Opcode VEX.0F38 0x0f - invalid. */
190/** Opcode VEX.66.0F38 0x0f. */
191FNIEMOP_STUB(iemOp_vtestpd_Vx_Wx);
192
193
194/* Opcode VEX.0F38 0x10 - invalid */
195/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
196/* Opcode VEX.0F38 0x11 - invalid */
197/* Opcode VEX.66.0F38 0x11 - invalid */
198/* Opcode VEX.0F38 0x12 - invalid */
199/* Opcode VEX.66.0F38 0x12 - invalid */
200/* Opcode VEX.0F38 0x13 - invalid */
201/* Opcode VEX.66.0F38 0x13 - invalid (vex only). */
202/* Opcode VEX.0F38 0x14 - invalid */
203/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
204/* Opcode VEX.0F38 0x15 - invalid */
205/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
206/* Opcode VEX.0F38 0x16 - invalid */
207/** Opcode VEX.66.0F38 0x16. */
208FNIEMOP_STUB(iemOp_vpermps_Vqq_Hqq_Wqq);
209/* Opcode VEX.0F38 0x17 - invalid */
210
211
212/** Opcode VEX.66.0F38 0x17. */
213FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
214{
215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
216 if (IEM_IS_MODRM_REG_MODE(bRm))
217 {
218 /*
219 * Register, register.
220 */
221 if (pVCpu->iem.s.uVexLength)
222 {
223 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
224 IEM_MC_BEGIN(3, 2);
225 IEM_MC_LOCAL(RTUINT256U, uSrc1);
226 IEM_MC_LOCAL(RTUINT256U, uSrc2);
227 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
228 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
229 IEM_MC_ARG(uint32_t *, pEFlags, 2);
230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
231 IEM_MC_PREPARE_AVX_USAGE();
232 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
233 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
234 IEM_MC_REF_EFLAGS(pEFlags);
235 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
236 puSrc1, puSrc2, pEFlags);
237 IEM_MC_ADVANCE_RIP();
238 IEM_MC_END();
239 }
240 else
241 {
242 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
243 IEM_MC_BEGIN(3, 0);
244 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
245 IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
246 IEM_MC_ARG(uint32_t *, pEFlags, 2);
247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
248 IEM_MC_PREPARE_AVX_USAGE();
249 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
250 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
251 IEM_MC_REF_EFLAGS(pEFlags);
252 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
253 IEM_MC_ADVANCE_RIP();
254 IEM_MC_END();
255 }
256 }
257 else
258 {
259 /*
260 * Register, memory.
261 */
262 if (pVCpu->iem.s.uVexLength)
263 {
264 IEM_MC_BEGIN(3, 3);
265 IEM_MC_LOCAL(RTUINT256U, uSrc1);
266 IEM_MC_LOCAL(RTUINT256U, uSrc2);
267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
268 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
269 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
271
272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
273 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
274 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
275 IEM_MC_PREPARE_AVX_USAGE();
276
277 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
278 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
279 IEM_MC_REF_EFLAGS(pEFlags);
280 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
281 puSrc1, puSrc2, pEFlags);
282
283 IEM_MC_ADVANCE_RIP();
284 IEM_MC_END();
285 }
286 else
287 {
288 IEM_MC_BEGIN(3, 2);
289 IEM_MC_LOCAL(RTUINT128U, uSrc2);
290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
291 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
292 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
293 IEM_MC_ARG(uint32_t *, pEFlags, 2);
294
295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
296 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
298 IEM_MC_PREPARE_AVX_USAGE();
299
300 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
301 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
302 IEM_MC_REF_EFLAGS(pEFlags);
303 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
304
305 IEM_MC_ADVANCE_RIP();
306 IEM_MC_END();
307 }
308 }
309 return VINF_SUCCESS;
310
311}
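
/* Informal flag semantics for (v)ptest as implemented above (sketch):
 *   ZF = ((src1 & src2) == 0);  CF = ((~src1 & src2) == 0);
 * with src1 the register (Vx) operand and src2 the reg/mem (Wx) operand; the
 * iemAImpl_*ptest* workers clear the remaining arithmetic flags. */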
312
313
314/* Opcode VEX.0F38 0x18 - invalid */
315/** Opcode VEX.66.0F38 0x18. */
316FNIEMOP_STUB(iemOp_vbroadcastss_Vx_Wd);
317/* Opcode VEX.0F38 0x19 - invalid */
318/** Opcode VEX.66.0F38 0x19. */
319FNIEMOP_STUB(iemOp_vbroadcastsd_Vqq_Wq);
320/* Opcode VEX.0F38 0x1a - invalid */
321/** Opcode VEX.66.0F38 0x1a. */
322FNIEMOP_STUB(iemOp_vbroadcastf128_Vqq_Mdq);
323/* Opcode VEX.0F38 0x1b - invalid */
324/* Opcode VEX.66.0F38 0x1b - invalid */
325/* Opcode VEX.0F38 0x1c - invalid. */
326
327
328/** Opcode VEX.66.0F38 0x1c. */
329FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
330{
331 IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
332 IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
333 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
334}
335
336
337/* Opcode VEX.0F38 0x1d - invalid. */
338
339
340/** Opcode VEX.66.0F38 0x1d. */
341FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
342{
343 IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
344 IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
345 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
346}
347
348/* Opcode VEX.0F38 0x1e - invalid. */
349
350
351/** Opcode VEX.66.0F38 0x1e. */
352FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
353{
354 IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
355 IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
356 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
357}
358
359
360/* Opcode VEX.0F38 0x1f - invalid */
361/* Opcode VEX.66.0F38 0x1f - invalid */
362
363
364/** Body for the vpmov{s,z}x* instructions. */
365#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth) \
366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
367 if (IEM_IS_MODRM_REG_MODE(bRm)) \
368 { \
369 /* \
370 * Register, register. \
371 */ \
372 if (pVCpu->iem.s.uVexLength) \
373 { \
374 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
375 IEM_MC_BEGIN(2, 1); \
376 IEM_MC_LOCAL(RTUINT256U, uDst); \
377 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
378 IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
379 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
380 IEM_MC_PREPARE_AVX_USAGE(); \
381 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
382 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
383 iemAImpl_ ## a_Instr ## _u256_fallback), \
384 puDst, puSrc); \
385 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
386 IEM_MC_ADVANCE_RIP(); \
387 IEM_MC_END(); \
388 } \
389 else \
390 { \
391 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
392 IEM_MC_BEGIN(2, 0); \
393 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
394 IEM_MC_ARG(uint64_t, uSrc, 1); \
395 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
396 IEM_MC_PREPARE_AVX_USAGE(); \
397 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
398 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
399 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
400 iemAImpl_## a_Instr ## _u128_fallback), \
401 puDst, uSrc); \
402 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
403 IEM_MC_ADVANCE_RIP(); \
404 IEM_MC_END(); \
405 } \
406 } \
407 else \
408 { \
409 /* \
410 * Register, memory. \
411 */ \
412 if (pVCpu->iem.s.uVexLength) \
413 { \
414 IEM_MC_BEGIN(2, 3); \
415 IEM_MC_LOCAL(RTUINT256U, uDst); \
416 IEM_MC_LOCAL(RTUINT128U, uSrc); \
417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
418 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
419 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
421 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
422 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
423 IEM_MC_PREPARE_AVX_USAGE(); \
424 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
425 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
426 iemAImpl_ ## a_Instr ## _u256_fallback), \
427 puDst, puSrc); \
428 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
429 IEM_MC_ADVANCE_RIP(); \
430 IEM_MC_END(); \
431 } \
432 else \
433 { \
434 IEM_MC_BEGIN(2, 1); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
436 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
437 IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
439 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
440 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
441 IEM_MC_PREPARE_AVX_USAGE(); \
442 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
443 IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
444 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
445 iemAImpl_ ## a_Instr ## _u128_fallback), \
446 puDst, uSrc); \
447 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
448 IEM_MC_ADVANCE_RIP(); \
449 IEM_MC_END(); \
450 } \
451 } \
452 return VINF_SUCCESS \
453
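/* Informal sketch of what IEMOP_BODY_VPMOV_S_Z expands to for the vpmovsx/zx
 * handlers below: the VEX.L=0 form reads the low quadword of the source
 * register (or a_SrcWidth bits from memory), widens each packed element via the
 * iemAImpl_<a_Instr>_u128 worker (or its fallback) and clears the upper YMM
 * half; the VEX.256 form reads a full 128-bit source and widens it into the
 * whole YMM destination. Whether it sign- or zero-extends is entirely down to
 * which worker pair a_Instr names. */
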
454/** Opcode VEX.66.0F38 0x20. */
455FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
456{
457 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
458 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
459 IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64);
460}
461
462
463/** Opcode VEX.66.0F38 0x21. */
464FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
465{
466 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
467 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
468 IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32);
469}
470
471
472/** Opcode VEX.66.0F38 0x22. */
473FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
474{
475 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
476 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
477 IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16);
478}
479
480
481/** Opcode VEX.66.0F38 0x23. */
482FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
483{
484 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
485 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
486 IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64);
487}
488
489
490/** Opcode VEX.66.0F38 0x24. */
491FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
492{
493 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
494 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
495 IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32);
496}
497
498
499/** Opcode VEX.66.0F38 0x25. */
500FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
501{
502 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
503 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
504 IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64);
505}
506
507
508/* Opcode VEX.66.0F38 0x26 - invalid */
509/* Opcode VEX.66.0F38 0x27 - invalid */
510
511
512/** Opcode VEX.66.0F38 0x28. */
513FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
514{
515 IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
516 IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
517 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
518}
519
520
521/** Opcode VEX.66.0F38 0x29. */
522FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
523{
524 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
525 IEMOPMEDIAF3_INIT_VARS(vpcmpeqq);
526 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
527}
528
529
530FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
531{
532 Assert(pVCpu->iem.s.uVexLength <= 1);
533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
534 if (IEM_IS_MODRM_MEM_MODE(bRm))
535 {
536 if (pVCpu->iem.s.uVexLength == 0)
537 {
538 /**
539 * @opcode 0x2a
540 * @opcodesub !11 mr/reg vex.l=0
541 * @oppfx 0x66
542 * @opcpuid avx
543 * @opgroup og_avx_cachect
544 * @opxcpttype 1
545 * @optest op1=-1 op2=2 -> op1=2
546 * @optest op1=0 op2=-42 -> op1=-42
547 */
548 /* 128-bit: Memory, register. */
549 IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
550 DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
551 IEM_MC_BEGIN(0, 2);
552 IEM_MC_LOCAL(RTUINT128U, uSrc);
553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
554
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
556 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
558 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
559
560 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
561 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
562
563 IEM_MC_ADVANCE_RIP();
564 IEM_MC_END();
565 }
566 else
567 {
568 /**
569 * @opdone
570 * @opcode 0x2a
571 * @opcodesub !11 mr/reg vex.l=1
572 * @oppfx 0x66
573 * @opcpuid avx2
574 * @opgroup og_avx2_cachect
575 * @opxcpttype 1
576 * @optest op1=-1 op2=2 -> op1=2
577 * @optest op1=0 op2=-42 -> op1=-42
578 */
579 /* 256-bit: Memory, register. */
580 IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
581 DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
582 IEM_MC_BEGIN(0, 2);
583 IEM_MC_LOCAL(RTUINT256U, uSrc);
584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
585
586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
587 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
588 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
590
591 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
592 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
593
594 IEM_MC_ADVANCE_RIP();
595 IEM_MC_END();
596 }
597 return VINF_SUCCESS;
598 }
599
600 /**
601 * @opdone
602 * @opmnemonic udvex660f382arg
603 * @opcode 0x2a
604 * @opcodesub 11 mr/reg
605 * @oppfx 0x66
606 * @opunused immediate
607 * @opcpuid avx
608 * @optest ->
609 */
610 return IEMOP_RAISE_INVALID_OPCODE();
611}
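
/* Note on the handler above: vmovntdqa is an aligned vector load with a
 * non-temporal cache hint; the alignment-checking fetch helpers reject
 * misaligned 16/32 byte operands and the ZX_VLMAX store zeroes the destination
 * bits above the fetched width. The register-source form (mod=11) is undefined
 * and raises #UD, as the tail of the function does. */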
612
613
614/** Opcode VEX.66.0F38 0x2b. */
615FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
616{
617 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
618 IEMOPMEDIAOPTF3_INIT_VARS(vpackusdw);
619 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
620}
621
622
623/** Opcode VEX.66.0F38 0x2c. */
624FNIEMOP_STUB(iemOp_vmaskmovps_Vx_Hx_Mx);
625/** Opcode VEX.66.0F38 0x2d. */
626FNIEMOP_STUB(iemOp_vmaskmovpd_Vx_Hx_Mx);
627/** Opcode VEX.66.0F38 0x2e. */
628FNIEMOP_STUB(iemOp_vmaskmovps_Mx_Hx_Vx);
629/** Opcode VEX.66.0F38 0x2f. */
630FNIEMOP_STUB(iemOp_vmaskmovpd_Mx_Hx_Vx);
631
632
633/** Opcode VEX.66.0F38 0x30. */
634FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
635{
636 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
637 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
638 IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64);
639}
640
641
642/** Opcode VEX.66.0F38 0x31. */
643FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
644{
645 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
646 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
647 IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32);
648}
649
650
651/** Opcode VEX.66.0F38 0x32. */
652FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
653{
654 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
655 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
656 IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16);
657}
658
659
660/** Opcode VEX.66.0F38 0x33. */
661FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
662{
663 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
664 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
665 IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64);
666}
667
668
669/** Opcode VEX.66.0F38 0x34. */
670FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
671{
672 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
673 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
674 IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32);
675}
676
677
678/** Opcode VEX.66.0F38 0x35. */
679FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
680{
681 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
682 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
683 IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64);
684}
685
686
687/** Opcode VEX.66.0F38 0x36. */
688FNIEMOP_STUB(iemOp_vpermd_Vqq_Hqq_Wqq);
689
690
691/** Opcode VEX.66.0F38 0x37. */
692FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
693{
694 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
695 IEMOPMEDIAF3_INIT_VARS(vpcmpgtq);
696 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
697}
698
699
700/** Opcode VEX.66.0F38 0x38. */
701FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
702{
703 IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
704 IEMOPMEDIAF3_INIT_VARS(vpminsb);
705 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
706}
707
708
709/** Opcode VEX.66.0F38 0x39. */
710FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
711{
712 IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
713 IEMOPMEDIAF3_INIT_VARS(vpminsd);
714 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
715}
716
717
718/** Opcode VEX.66.0F38 0x3a. */
719FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
720{
721 IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
722 IEMOPMEDIAF3_INIT_VARS(vpminuw);
723 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
724}
725
726
727/** Opcode VEX.66.0F38 0x3b. */
728FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
729{
730 IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
731 IEMOPMEDIAF3_INIT_VARS(vpminud);
732 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
733}
734
735
736/** Opcode VEX.66.0F38 0x3c. */
737FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
738{
739 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
740 IEMOPMEDIAF3_INIT_VARS(vpmaxsb);
741 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
742}
743
744
745/** Opcode VEX.66.0F38 0x3d. */
746FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
747{
748 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
749 IEMOPMEDIAF3_INIT_VARS(vpmaxsd);
750 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
751}
752
753
754/** Opcode VEX.66.0F38 0x3e. */
755FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
756{
757 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
758 IEMOPMEDIAF3_INIT_VARS(vpmaxuw);
759 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
760}
761
762
763/** Opcode VEX.66.0F38 0x3f. */
764FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
765{
766 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
767 IEMOPMEDIAF3_INIT_VARS(vpmaxud);
768 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
769}
770
771
772/** Opcode VEX.66.0F38 0x40. */
773FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
774{
775 IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
776 IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
777 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
778}
779
780
781/** Opcode VEX.66.0F38 0x41. */
782FNIEMOP_STUB(iemOp_vphminposuw_Vdq_Wdq);
783/* Opcode VEX.66.0F38 0x42 - invalid. */
784/* Opcode VEX.66.0F38 0x43 - invalid. */
785/* Opcode VEX.66.0F38 0x44 - invalid. */
786/** Opcode VEX.66.0F38 0x45. */
787FNIEMOP_STUB(iemOp_vpsrlvd_q_Vx_Hx_Wx);
788/** Opcode VEX.66.0F38 0x46. */
789FNIEMOP_STUB(iemOp_vsravd_Vx_Hx_Wx);
790/** Opcode VEX.66.0F38 0x47. */
791FNIEMOP_STUB(iemOp_vpsllvd_q_Vx_Hx_Wx);
792/* Opcode VEX.66.0F38 0x48 - invalid. */
793/* Opcode VEX.66.0F38 0x49 - invalid. */
794/* Opcode VEX.66.0F38 0x4a - invalid. */
795/* Opcode VEX.66.0F38 0x4b - invalid. */
796/* Opcode VEX.66.0F38 0x4c - invalid. */
797/* Opcode VEX.66.0F38 0x4d - invalid. */
798/* Opcode VEX.66.0F38 0x4e - invalid. */
799/* Opcode VEX.66.0F38 0x4f - invalid. */
800
801/* Opcode VEX.66.0F38 0x50 - invalid. */
802/* Opcode VEX.66.0F38 0x51 - invalid. */
803/* Opcode VEX.66.0F38 0x52 - invalid. */
804/* Opcode VEX.66.0F38 0x53 - invalid. */
805/* Opcode VEX.66.0F38 0x54 - invalid. */
806/* Opcode VEX.66.0F38 0x55 - invalid. */
807/* Opcode VEX.66.0F38 0x56 - invalid. */
808/* Opcode VEX.66.0F38 0x57 - invalid. */
809/** Opcode VEX.66.0F38 0x58. */
810FNIEMOP_STUB(iemOp_vpbroadcastd_Vx_Wx);
811/** Opcode VEX.66.0F38 0x59. */
812FNIEMOP_STUB(iemOp_vpbroadcastq_Vx_Wx);
813/** Opcode VEX.66.0F38 0x5a. */
814FNIEMOP_STUB(iemOp_vbroadcasti128_Vqq_Mdq);
815/* Opcode VEX.66.0F38 0x5b - invalid. */
816/* Opcode VEX.66.0F38 0x5c - invalid. */
817/* Opcode VEX.66.0F38 0x5d - invalid. */
818/* Opcode VEX.66.0F38 0x5e - invalid. */
819/* Opcode VEX.66.0F38 0x5f - invalid. */
820
821/* Opcode VEX.66.0F38 0x60 - invalid. */
822/* Opcode VEX.66.0F38 0x61 - invalid. */
823/* Opcode VEX.66.0F38 0x62 - invalid. */
824/* Opcode VEX.66.0F38 0x63 - invalid. */
825/* Opcode VEX.66.0F38 0x64 - invalid. */
826/* Opcode VEX.66.0F38 0x65 - invalid. */
827/* Opcode VEX.66.0F38 0x66 - invalid. */
828/* Opcode VEX.66.0F38 0x67 - invalid. */
829/* Opcode VEX.66.0F38 0x68 - invalid. */
830/* Opcode VEX.66.0F38 0x69 - invalid. */
831/* Opcode VEX.66.0F38 0x6a - invalid. */
832/* Opcode VEX.66.0F38 0x6b - invalid. */
833/* Opcode VEX.66.0F38 0x6c - invalid. */
834/* Opcode VEX.66.0F38 0x6d - invalid. */
835/* Opcode VEX.66.0F38 0x6e - invalid. */
836/* Opcode VEX.66.0F38 0x6f - invalid. */
837
838/* Opcode VEX.66.0F38 0x70 - invalid. */
839/* Opcode VEX.66.0F38 0x71 - invalid. */
840/* Opcode VEX.66.0F38 0x72 - invalid. */
841/* Opcode VEX.66.0F38 0x73 - invalid. */
842/* Opcode VEX.66.0F38 0x74 - invalid. */
843/* Opcode VEX.66.0F38 0x75 - invalid. */
844/* Opcode VEX.66.0F38 0x76 - invalid. */
845/* Opcode VEX.66.0F38 0x77 - invalid. */
846/** Opcode VEX.66.0F38 0x78. */
847FNIEMOP_STUB(iemOp_vpboardcastb_Vx_Wx);
848/** Opcode VEX.66.0F38 0x79. */
849FNIEMOP_STUB(iemOp_vpboardcastw_Vx_Wx);
850/* Opcode VEX.66.0F38 0x7a - invalid. */
851/* Opcode VEX.66.0F38 0x7b - invalid. */
852/* Opcode VEX.66.0F38 0x7c - invalid. */
853/* Opcode VEX.66.0F38 0x7d - invalid. */
854/* Opcode VEX.66.0F38 0x7e - invalid. */
855/* Opcode VEX.66.0F38 0x7f - invalid. */
856
857/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
858/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
859/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
860/* Opcode VEX.66.0F38 0x83 - invalid. */
861/* Opcode VEX.66.0F38 0x84 - invalid. */
862/* Opcode VEX.66.0F38 0x85 - invalid. */
863/* Opcode VEX.66.0F38 0x86 - invalid. */
864/* Opcode VEX.66.0F38 0x87 - invalid. */
865/* Opcode VEX.66.0F38 0x88 - invalid. */
866/* Opcode VEX.66.0F38 0x89 - invalid. */
867/* Opcode VEX.66.0F38 0x8a - invalid. */
868/* Opcode VEX.66.0F38 0x8b - invalid. */
869/** Opcode VEX.66.0F38 0x8c. */
870FNIEMOP_STUB(iemOp_vpmaskmovd_q_Vx_Hx_Mx);
871/* Opcode VEX.66.0F38 0x8d - invalid. */
872/** Opcode VEX.66.0F38 0x8e. */
873FNIEMOP_STUB(iemOp_vpmaskmovd_q_Mx_Vx_Hx);
874/* Opcode VEX.66.0F38 0x8f - invalid. */
875
876/** Opcode VEX.66.0F38 0x90 (vex only). */
877FNIEMOP_STUB(iemOp_vgatherdd_q_Vx_Hx_Wx);
878/** Opcode VEX.66.0F38 0x91 (vex only). */
879FNIEMOP_STUB(iemOp_vgatherqd_q_Vx_Hx_Wx);
880/** Opcode VEX.66.0F38 0x92 (vex only). */
881FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
882/** Opcode VEX.66.0F38 0x93 (vex only). */
883FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
884/* Opcode VEX.66.0F38 0x94 - invalid. */
885/* Opcode VEX.66.0F38 0x95 - invalid. */
886/** Opcode VEX.66.0F38 0x96 (vex only). */
887FNIEMOP_STUB(iemOp_vfmaddsub132ps_q_Vx_Hx_Wx);
888/** Opcode VEX.66.0F38 0x97 (vex only). */
889FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
890/** Opcode VEX.66.0F38 0x98 (vex only). */
891FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
892/** Opcode VEX.66.0F38 0x99 (vex only). */
893FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
894/** Opcode VEX.66.0F38 0x9a (vex only). */
895FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
896/** Opcode VEX.66.0F38 0x9b (vex only). */
897FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
898/** Opcode VEX.66.0F38 0x9c (vex only). */
899FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
900/** Opcode VEX.66.0F38 0x9d (vex only). */
901FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
902/** Opcode VEX.66.0F38 0x9e (vex only). */
903FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
904/** Opcode VEX.66.0F38 0x9f (vex only). */
905FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);
906
907/* Opcode VEX.66.0F38 0xa0 - invalid. */
908/* Opcode VEX.66.0F38 0xa1 - invalid. */
909/* Opcode VEX.66.0F38 0xa2 - invalid. */
910/* Opcode VEX.66.0F38 0xa3 - invalid. */
911/* Opcode VEX.66.0F38 0xa4 - invalid. */
912/* Opcode VEX.66.0F38 0xa5 - invalid. */
913/** Opcode VEX.66.0F38 0xa6 (vex only). */
914FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
915/** Opcode VEX.66.0F38 0xa7 (vex only). */
916FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
917/** Opcode VEX.66.0F38 0xa8 (vex only). */
918FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
919/** Opcode VEX.66.0F38 0xa9 (vex only). */
920FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
921/** Opcode VEX.66.0F38 0xaa (vex only). */
922FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
923/** Opcode VEX.66.0F38 0xab (vex only). */
924FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
925/** Opcode VEX.66.0F38 0xac (vex only). */
926FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
927/** Opcode VEX.66.0F38 0xad (vex only). */
928FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
929/** Opcode VEX.66.0F38 0xae (vex only). */
930FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
931/** Opcode VEX.66.0F38 0xaf (vex only). */
932FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);
933
934/* Opcode VEX.66.0F38 0xb0 - invalid. */
935/* Opcode VEX.66.0F38 0xb1 - invalid. */
936/* Opcode VEX.66.0F38 0xb2 - invalid. */
937/* Opcode VEX.66.0F38 0xb3 - invalid. */
938/* Opcode VEX.66.0F38 0xb4 - invalid. */
939/* Opcode VEX.66.0F38 0xb5 - invalid. */
940/** Opcode VEX.66.0F38 0xb6 (vex only). */
941FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
942/** Opcode VEX.66.0F38 0xb7 (vex only). */
943FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
944/** Opcode VEX.66.0F38 0xb8 (vex only). */
945FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
946/** Opcode VEX.66.0F38 0xb9 (vex only). */
947FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
948/** Opcode VEX.66.0F38 0xba (vex only). */
949FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
950/** Opcode VEX.66.0F38 0xbb (vex only). */
951FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
952/** Opcode VEX.66.0F38 0xbc (vex only). */
953FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
954/** Opcode VEX.66.0F38 0xbd (vex only). */
955FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
956/** Opcode VEX.66.0F38 0xbe (vex only). */
957FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
958/** Opcode VEX.66.0F38 0xbf (vex only). */
959FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);
960
961/* Opcode VEX.0F38 0xc0 - invalid. */
962/* Opcode VEX.66.0F38 0xc0 - invalid. */
963/* Opcode VEX.0F38 0xc1 - invalid. */
964/* Opcode VEX.66.0F38 0xc1 - invalid. */
965/* Opcode VEX.0F38 0xc2 - invalid. */
966/* Opcode VEX.66.0F38 0xc2 - invalid. */
967/* Opcode VEX.0F38 0xc3 - invalid. */
968/* Opcode VEX.66.0F38 0xc3 - invalid. */
969/* Opcode VEX.0F38 0xc4 - invalid. */
970/* Opcode VEX.66.0F38 0xc4 - invalid. */
971/* Opcode VEX.0F38 0xc5 - invalid. */
972/* Opcode VEX.66.0F38 0xc5 - invalid. */
973/* Opcode VEX.0F38 0xc6 - invalid. */
974/* Opcode VEX.66.0F38 0xc6 - invalid. */
975/* Opcode VEX.0F38 0xc7 - invalid. */
976/* Opcode VEX.66.0F38 0xc7 - invalid. */
977/** Opcode VEX.0F38 0xc8. */
978FNIEMOP_STUB(iemOp_vsha1nexte_Vdq_Wdq);
979/* Opcode VEX.66.0F38 0xc8 - invalid. */
980/** Opcode VEX.0F38 0xc9. */
981FNIEMOP_STUB(iemOp_vsha1msg1_Vdq_Wdq);
982/* Opcode VEX.66.0F38 0xc9 - invalid. */
983/** Opcode VEX.0F38 0xca. */
984FNIEMOP_STUB(iemOp_vsha1msg2_Vdq_Wdq);
985/* Opcode VEX.66.0F38 0xca - invalid. */
986/** Opcode VEX.0F38 0xcb. */
987FNIEMOP_STUB(iemOp_vsha256rnds2_Vdq_Wdq);
988/* Opcode VEX.66.0F38 0xcb - invalid. */
989/** Opcode VEX.0F38 0xcc. */
990FNIEMOP_STUB(iemOp_vsha256msg1_Vdq_Wdq);
991/* Opcode VEX.66.0F38 0xcc - invalid. */
992/** Opcode VEX.0F38 0xcd. */
993FNIEMOP_STUB(iemOp_vsha256msg2_Vdq_Wdq);
994/* Opcode VEX.66.0F38 0xcd - invalid. */
995/* Opcode VEX.0F38 0xce - invalid. */
996/* Opcode VEX.66.0F38 0xce - invalid. */
997/* Opcode VEX.0F38 0xcf - invalid. */
998/* Opcode VEX.66.0F38 0xcf - invalid. */
999
1000/* Opcode VEX.66.0F38 0xd0 - invalid. */
1001/* Opcode VEX.66.0F38 0xd1 - invalid. */
1002/* Opcode VEX.66.0F38 0xd2 - invalid. */
1003/* Opcode VEX.66.0F38 0xd3 - invalid. */
1004/* Opcode VEX.66.0F38 0xd4 - invalid. */
1005/* Opcode VEX.66.0F38 0xd5 - invalid. */
1006/* Opcode VEX.66.0F38 0xd6 - invalid. */
1007/* Opcode VEX.66.0F38 0xd7 - invalid. */
1008/* Opcode VEX.66.0F38 0xd8 - invalid. */
1009/* Opcode VEX.66.0F38 0xd9 - invalid. */
1010/* Opcode VEX.66.0F38 0xda - invalid. */
1011/** Opcode VEX.66.0F38 0xdb. */
1012FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
1013/** Opcode VEX.66.0F38 0xdc. */
1014FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
1015/** Opcode VEX.66.0F38 0xdd. */
1016FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
1017/** Opcode VEX.66.0F38 0xde. */
1018FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
1019/** Opcode VEX.66.0F38 0xdf. */
1020FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);
1021
1022/* Opcode VEX.66.0F38 0xe0 - invalid. */
1023/* Opcode VEX.66.0F38 0xe1 - invalid. */
1024/* Opcode VEX.66.0F38 0xe2 - invalid. */
1025/* Opcode VEX.66.0F38 0xe3 - invalid. */
1026/* Opcode VEX.66.0F38 0xe4 - invalid. */
1027/* Opcode VEX.66.0F38 0xe5 - invalid. */
1028/* Opcode VEX.66.0F38 0xe6 - invalid. */
1029/* Opcode VEX.66.0F38 0xe7 - invalid. */
1030/* Opcode VEX.66.0F38 0xe8 - invalid. */
1031/* Opcode VEX.66.0F38 0xe9 - invalid. */
1032/* Opcode VEX.66.0F38 0xea - invalid. */
1033/* Opcode VEX.66.0F38 0xeb - invalid. */
1034/* Opcode VEX.66.0F38 0xec - invalid. */
1035/* Opcode VEX.66.0F38 0xed - invalid. */
1036/* Opcode VEX.66.0F38 0xee - invalid. */
1037/* Opcode VEX.66.0F38 0xef - invalid. */
1038
1039
1040/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
1041/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
1042/* Opcode VEX.F3.0F38 0xf0 - invalid. */
1043/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */
1044
1045/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
1046/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
1047/* Opcode VEX.F3.0F38 0xf1 - invalid. */
1048/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */
1049
1050/** Opcode VEX.0F38 0xf2 - ANDN (vex only). */
1051FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
1052{
1053 IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1054 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
1055 return iemOp_InvalidNeedRM(pVCpu);
1056 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
1057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1058 if (IEM_IS_MODRM_REG_MODE(bRm))
1059 {
1060 /*
1061 * Register, register.
1062 */
1063 IEMOP_HLP_DONE_VEX_DECODING_L0();
1064 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1065 {
1066 IEM_MC_BEGIN(4, 0);
1067 IEM_MC_ARG(uint64_t *, pDst, 0);
1068 IEM_MC_ARG(uint64_t, uSrc1, 1);
1069 IEM_MC_ARG(uint64_t, uSrc2, 2);
1070 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1071 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1072 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1073 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1074 IEM_MC_REF_EFLAGS(pEFlags);
1075 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
1076 pDst, uSrc1, uSrc2, pEFlags);
1077 IEM_MC_ADVANCE_RIP();
1078 IEM_MC_END();
1079 }
1080 else
1081 {
1082 IEM_MC_BEGIN(4, 0);
1083 IEM_MC_ARG(uint32_t *, pDst, 0);
1084 IEM_MC_ARG(uint32_t, uSrc1, 1);
1085 IEM_MC_ARG(uint32_t, uSrc2, 2);
1086 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1087 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1088 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1089 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1090 IEM_MC_REF_EFLAGS(pEFlags);
1091 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
1092 pDst, uSrc1, uSrc2, pEFlags);
1093 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
1094 IEM_MC_ADVANCE_RIP();
1095 IEM_MC_END();
1096 }
1097 }
1098 else
1099 {
1100 /*
1101 * Register, memory.
1102 */
1103 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1104 {
1105 IEM_MC_BEGIN(4, 1);
1106 IEM_MC_ARG(uint64_t *, pDst, 0);
1107 IEM_MC_ARG(uint64_t, uSrc1, 1);
1108 IEM_MC_ARG(uint64_t, uSrc2, 2);
1109 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1112 IEMOP_HLP_DONE_VEX_DECODING_L0();
1113 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1114 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1115 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1116 IEM_MC_REF_EFLAGS(pEFlags);
1117 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
1118 pDst, uSrc1, uSrc2, pEFlags);
1119 IEM_MC_ADVANCE_RIP();
1120 IEM_MC_END();
1121 }
1122 else
1123 {
1124 IEM_MC_BEGIN(4, 1);
1125 IEM_MC_ARG(uint32_t *, pDst, 0);
1126 IEM_MC_ARG(uint32_t, uSrc1, 1);
1127 IEM_MC_ARG(uint32_t, uSrc2, 2);
1128 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1131 IEMOP_HLP_DONE_VEX_DECODING_L0();
1132 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1133 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1134 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1135 IEM_MC_REF_EFLAGS(pEFlags);
1136 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
1137 pDst, uSrc1, uSrc2, pEFlags);
1138 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
1139 IEM_MC_ADVANCE_RIP();
1140 IEM_MC_END();
1141 }
1142 }
1143 return VINF_SUCCESS;
1144}
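
/* Informal reference semantics for andn (sketch): pDst = ~uSrc1 & uSrc2, i.e.
 * the inverted VVVV register ANDed with the reg/mem operand; SF and ZF follow
 * the result, OF and CF are cleared, and AF/PF are left undefined (hence the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS above). */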
1145
1146/* Opcode VEX.66.0F38 0xf2 - invalid. */
1147/* Opcode VEX.F3.0F38 0xf2 - invalid. */
1148/* Opcode VEX.F2.0F38 0xf2 - invalid. */
1149
1150
1151/* Opcode VEX.0F38 0xf3 - invalid. */
1152/* Opcode VEX.66.0F38 0xf3 - invalid. */
1153
1154/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */
1155
1156/** Body for the vex group 17 instructions. */
1157#define IEMOP_BODY_By_Ey(a_Instr) \
1158 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1) \
1159 return iemOp_InvalidWithRM(pVCpu, bRm); /* decode memory variant? */ \
1160 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
1161 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1162 { \
1163 /* \
1164 * Register, register. \
1165 */ \
1166 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1167 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1168 { \
1169 IEM_MC_BEGIN(3, 0); \
1170 IEM_MC_ARG(uint64_t *, pDst, 0); \
1171 IEM_MC_ARG(uint64_t, uSrc, 1); \
1172 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1173 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1174 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1175 IEM_MC_REF_EFLAGS(pEFlags); \
1176 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
1177 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
1178 IEM_MC_ADVANCE_RIP(); \
1179 IEM_MC_END(); \
1180 } \
1181 else \
1182 { \
1183 IEM_MC_BEGIN(3, 0); \
1184 IEM_MC_ARG(uint32_t *, pDst, 0); \
1185 IEM_MC_ARG(uint32_t, uSrc, 1); \
1186 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1187 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1188 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1189 IEM_MC_REF_EFLAGS(pEFlags); \
1190 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
1191 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
1192 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1193 IEM_MC_ADVANCE_RIP(); \
1194 IEM_MC_END(); \
1195 } \
1196 } \
1197 else \
1198 { \
1199 /* \
1200 * Register, memory. \
1201 */ \
1202 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1203 { \
1204 IEM_MC_BEGIN(3, 1); \
1205 IEM_MC_ARG(uint64_t *, pDst, 0); \
1206 IEM_MC_ARG(uint64_t, uSrc, 1); \
1207 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1210 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1211 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1212 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1213 IEM_MC_REF_EFLAGS(pEFlags); \
1214 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
1215 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
1216 IEM_MC_ADVANCE_RIP(); \
1217 IEM_MC_END(); \
1218 } \
1219 else \
1220 { \
1221 IEM_MC_BEGIN(3, 1); \
1222 IEM_MC_ARG(uint32_t *, pDst, 0); \
1223 IEM_MC_ARG(uint32_t, uSrc, 1); \
1224 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1227 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1228 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1229 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1230 IEM_MC_REF_EFLAGS(pEFlags); \
1231 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
1232 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
1233 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1234 IEM_MC_ADVANCE_RIP(); \
1235 IEM_MC_END(); \
1236 } \
1237 } \
1238 return VINF_SUCCESS
1239
1240
1241/* Opcode VEX.F3.0F38 0xf3 /1. */
1242/** @opcode /1
1243 * @opmaps vexgrp17 */
1244FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
1245{
1246 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1247 IEMOP_BODY_By_Ey(blsr);
1248}
1249
1250
1251/* Opcode VEX.F3.0F38 0xf3 /2. */
1252/** @opcode /2
1253 * @opmaps vexgrp17 */
1254FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
1255{
1256 IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1257 IEMOP_BODY_By_Ey(blsmsk);
1258}
1259
1260
1261/* Opcode VEX.F3.0F38 0xf3 /3. */
1262/** @opcode /3
1263 * @opmaps vexgrp17 */
1264FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
1265{
1266 IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1267 IEMOP_BODY_By_Ey(blsi);
1268}
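
/* Informal reference semantics for the group 17 bodies above (sketches), with
 * src the reg/mem operand and dst the VVVV register:
 *   blsr:   dst = src & (src - 1)   - clear the lowest set bit
 *   blsmsk: dst = src ^ (src - 1)   - mask up to and including the lowest set bit
 *   blsi:   dst = src & (0 - src)   - isolate the lowest set bit
 * SF/ZF/CF are produced by the workers per the SDM; AF and PF stay undefined. */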
1269
1270
1271/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
1272/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
1273/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
1274/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */
1275
1276/**
1277 * Group 17 jump table for the VEX.F3 variant.
1278 */
1279IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
1280{
1281 /* /0 */ iemOp_InvalidWithRM,
1282 /* /1 */ iemOp_VGrp17_blsr_By_Ey,
1283 /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
1284 /* /3 */ iemOp_VGrp17_blsi_By_Ey,
1285 /* /4 */ iemOp_InvalidWithRM,
1286 /* /5 */ iemOp_InvalidWithRM,
1287 /* /6 */ iemOp_InvalidWithRM,
1288 /* /7 */ iemOp_InvalidWithRM
1289};
1290AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);
1291
1292/** Opcode VEX.F3.0F38 0xf3 (vex only - group 17; /1../3 valid, the rest invalid). */
1293FNIEMOP_DEF(iemOp_VGrp17_f3)
1294{
1295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1296 return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
1297}
1298
1299/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */
1300
1301
1302/* Opcode VEX.0F38 0xf4 - invalid. */
1303/* Opcode VEX.66.0F38 0xf4 - invalid. */
1304/* Opcode VEX.F3.0F38 0xf4 - invalid. */
1305/* Opcode VEX.F2.0F38 0xf4 - invalid. */
1306
1307/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
1308#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
1309 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
1310 return iemOp_InvalidNeedRM(pVCpu); \
1311 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
1312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1313 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1314 { \
1315 /* \
1316 * Register, register. \
1317 */ \
1318 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1319 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1320 { \
1321 IEM_MC_BEGIN(4, 0); \
1322 IEM_MC_ARG(uint64_t *, pDst, 0); \
1323 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1324 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1325 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
1326 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1327 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1328 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1329 IEM_MC_REF_EFLAGS(pEFlags); \
1330 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
1331 iemAImpl_ ## a_Instr ## _u64_fallback), \
1332 pDst, uSrc1, uSrc2, pEFlags); \
1333 IEM_MC_ADVANCE_RIP(); \
1334 IEM_MC_END(); \
1335 } \
1336 else \
1337 { \
1338 IEM_MC_BEGIN(4, 0); \
1339 IEM_MC_ARG(uint32_t *, pDst, 0); \
1340 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1341 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1342 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
1343 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1344 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1345 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1346 IEM_MC_REF_EFLAGS(pEFlags); \
1347 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
1348 iemAImpl_ ## a_Instr ## _u32_fallback), \
1349 pDst, uSrc1, uSrc2, pEFlags); \
1350 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1351 IEM_MC_ADVANCE_RIP(); \
1352 IEM_MC_END(); \
1353 } \
1354 } \
1355 else \
1356 { \
1357 /* \
1358 * Register, memory. \
1359 */ \
1360 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1361 { \
1362 IEM_MC_BEGIN(4, 1); \
1363 IEM_MC_ARG(uint64_t *, pDst, 0); \
1364 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1365 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1366 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
1367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1369 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1370 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1371 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1372 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1373 IEM_MC_REF_EFLAGS(pEFlags); \
1374 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
1375 iemAImpl_ ## a_Instr ## _u64_fallback), \
1376 pDst, uSrc1, uSrc2, pEFlags); \
1377 IEM_MC_ADVANCE_RIP(); \
1378 IEM_MC_END(); \
1379 } \
1380 else \
1381 { \
1382 IEM_MC_BEGIN(4, 1); \
1383 IEM_MC_ARG(uint32_t *, pDst, 0); \
1384 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1385 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1386 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
1387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1389 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1390 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1391 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1392 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1393 IEM_MC_REF_EFLAGS(pEFlags); \
1394 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
1395 iemAImpl_ ## a_Instr ## _u32_fallback), \
1396 pDst, uSrc1, uSrc2, pEFlags); \
1397 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1398 IEM_MC_ADVANCE_RIP(); \
1399 IEM_MC_END(); \
1400 } \
1401 } \
1402 return VINF_SUCCESS
1403
1404/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
1405#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember, a_fUndefFlags) \
1406 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
1407 return iemOp_InvalidNeedRM(pVCpu); \
1408 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
1409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1410 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1411 { \
1412 /* \
1413 * Register, register. \
1414 */ \
1415 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1416 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1417 { \
1418 IEM_MC_BEGIN(3, 0); \
1419 IEM_MC_ARG(uint64_t *, pDst, 0); \
1420 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1421 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1422 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1423 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1424 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1425 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
1426 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1427 IEM_MC_ADVANCE_RIP(); \
1428 IEM_MC_END(); \
1429 } \
1430 else \
1431 { \
1432 IEM_MC_BEGIN(3, 0); \
1433 IEM_MC_ARG(uint32_t *, pDst, 0); \
1434 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1435 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1436 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1437 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1438 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1439 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
1440 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1441 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1442 IEM_MC_ADVANCE_RIP(); \
1443 IEM_MC_END(); \
1444 } \
1445 } \
1446 else \
1447 { \
1448 /* \
1449 * Register, memory. \
1450 */ \
1451 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1452 { \
1453 IEM_MC_BEGIN(3, 1); \
1454 IEM_MC_ARG(uint64_t *, pDst, 0); \
1455 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1456 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1459 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1460 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1461 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1462 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1463 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
1464 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1465 IEM_MC_ADVANCE_RIP(); \
1466 IEM_MC_END(); \
1467 } \
1468 else \
1469 { \
1470 IEM_MC_BEGIN(3, 1); \
1471 IEM_MC_ARG(uint32_t *, pDst, 0); \
1472 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1473 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1476 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1477 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1478 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1479 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1480 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
1481 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1482 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1483 IEM_MC_ADVANCE_RIP(); \
1484 IEM_MC_END(); \
1485 } \
1486 } \
1487 return VINF_SUCCESS
1488
1489/** Opcode VEX.0F38 0xf5 (vex only). */
1490FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
1491{
1492 IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1493 IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
1494}
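
/* Informal reference semantics for bzhi (sketch): with n = uSrc2 & 0xff (the
 * VVVV register), the reg/mem source uSrc1 is copied to the destination with
 * all bits at positions >= n cleared (unchanged when n is at least the operand
 * width); the workers take care of the flag details, AF/PF being undefined as
 * noted above. */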
1495
1496/* Opcode VEX.66.0F38 0xf5 - invalid. */
1497
1498/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
1499#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
1500 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
1501 return iemOp_InvalidNeedRM(pVCpu); \
1502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1503 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1504 { \
1505 /* \
1506 * Register, register. \
1507 */ \
1508 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1509 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1510 { \
1511 IEM_MC_BEGIN(3, 0); \
1512 IEM_MC_ARG(uint64_t *, pDst, 0); \
1513 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1514 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1515 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1516 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1517 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1518 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1519 iemAImpl_ ## a_Instr ## _u64, \
1520 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1521 IEM_MC_ADVANCE_RIP(); \
1522 IEM_MC_END(); \
1523 } \
1524 else \
1525 { \
1526 IEM_MC_BEGIN(3, 0); \
1527 IEM_MC_ARG(uint32_t *, pDst, 0); \
1528 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1529 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1530 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1531 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1532 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1533 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1534 iemAImpl_ ## a_Instr ## _u32, \
1535 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1536 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1537 IEM_MC_ADVANCE_RIP(); \
1538 IEM_MC_END(); \
1539 } \
1540 } \
1541 else \
1542 { \
1543 /* \
1544 * Register, memory. \
1545 */ \
1546 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1547 { \
1548 IEM_MC_BEGIN(3, 1); \
1549 IEM_MC_ARG(uint64_t *, pDst, 0); \
1550 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1551 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1554 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1555 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1556 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1557 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1558 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1559 iemAImpl_ ## a_Instr ## _u64, \
1560 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1561 IEM_MC_ADVANCE_RIP(); \
1562 IEM_MC_END(); \
1563 } \
1564 else \
1565 { \
1566 IEM_MC_BEGIN(3, 1); \
1567 IEM_MC_ARG(uint32_t *, pDst, 0); \
1568 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1569 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1572 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1573 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1574 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1575 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1576 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1577 iemAImpl_ ## a_Instr ## _u32, \
1578 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1579 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1580 IEM_MC_ADVANCE_RIP(); \
1581 IEM_MC_END(); \
1582 } \
1583 } \
1584 return VINF_SUCCESS;
1585
1586
1587/** Opcode VEX.F3.0F38 0xf5 (vex only). */
1588FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
1589{
1590 IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1591 IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
1592}
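
/*
 * Reference model of the PEXT (parallel bit extract) operation as described
 * in the SDM; an illustrative sketch only - iemRefPextU64 is a made-up name
 * and the real iemAImpl_pext_u64 worker is implemented elsewhere.  The first
 * source (VEX.vvvv, By) supplies the bits and the second source (Ey) the
 * mask: every source bit selected by the mask is copied to the next free
 * low-order bit of the result.
 */
static uint64_t iemRefPextU64(uint64_t uSrc, uint64_t uMask)
{
    uint64_t uResult = 0;
    unsigned iDstBit = 0;
    for (unsigned iSrcBit = 0; iSrcBit < 64; iSrcBit++)
        if (uMask & (UINT64_C(1) << iSrcBit))
        {
            uResult |= ((uSrc >> iSrcBit) & 1) << iDstBit;
            iDstBit++;
        }
    return uResult;
}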
1593
1594
1595/** Opcode VEX.F2.0F38 0xf5 (vex only). */
1596FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
1597{
1598 IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1599 IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
1600}
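
/*
 * Reference model of the PDEP (parallel bit deposit) operation as described
 * in the SDM; an illustrative sketch only - iemRefPdepU64 is a made-up name
 * and the real iemAImpl_pdep_u64 worker is implemented elsewhere.  The
 * low-order bits of the first source (VEX.vvvv, By) are scattered to the bit
 * positions selected by the mask in the second source (Ey).
 */
static uint64_t iemRefPdepU64(uint64_t uSrc, uint64_t uMask)
{
    uint64_t uResult = 0;
    unsigned iSrcBit = 0;
    for (unsigned iDstBit = 0; iDstBit < 64; iDstBit++)
        if (uMask & (UINT64_C(1) << iDstBit))
        {
            uResult |= ((uSrc >> iSrcBit) & 1) << iDstBit;
            iSrcBit++;
        }
    return uResult;
}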
1601
1602
1603/* Opcode VEX.0F38 0xf6 - invalid. */
1604/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
1605/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */
1606
1607
1608/** Opcode VEX.F2.0F38 0xf6 (vex only). */
1609FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
1610{
1611 IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1612 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi2)
1613 return iemOp_InvalidNeedRM(pVCpu);
1614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1615 if (IEM_IS_MODRM_REG_MODE(bRm))
1616 {
1617 /*
1618 * Register, register.
1619 */
1620 IEMOP_HLP_DONE_VEX_DECODING_L0();
1621 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1622 {
1623 IEM_MC_BEGIN(4, 0);
1624 IEM_MC_ARG(uint64_t *, pDst1, 0);
1625 IEM_MC_ARG(uint64_t *, pDst2, 1);
1626 IEM_MC_ARG(uint64_t, uSrc1, 2);
1627 IEM_MC_ARG(uint64_t, uSrc2, 3);
1628 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
1629 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1630 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
1631 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1632 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
1633 pDst1, pDst2, uSrc1, uSrc2);
1634 IEM_MC_ADVANCE_RIP();
1635 IEM_MC_END();
1636 }
1637 else
1638 {
1639 IEM_MC_BEGIN(4, 0);
1640 IEM_MC_ARG(uint32_t *, pDst1, 0);
1641 IEM_MC_ARG(uint32_t *, pDst2, 1);
1642 IEM_MC_ARG(uint32_t, uSrc1, 2);
1643 IEM_MC_ARG(uint32_t, uSrc2, 3);
1644 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
1645 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1646 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
1647 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1648 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
1649 pDst1, pDst2, uSrc1, uSrc2);
1650 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
1651 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
1652 IEM_MC_ADVANCE_RIP();
1653 IEM_MC_END();
1654 }
1655 }
1656 else
1657 {
1658 /*
1659 * Register, memory.
1660 */
1661 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1662 {
1663 IEM_MC_BEGIN(4, 1);
1664 IEM_MC_ARG(uint64_t *, pDst1, 0);
1665 IEM_MC_ARG(uint64_t *, pDst2, 1);
1666 IEM_MC_ARG(uint64_t, uSrc1, 2);
1667 IEM_MC_ARG(uint64_t, uSrc2, 3);
1668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1670 IEMOP_HLP_DONE_VEX_DECODING_L0();
1671 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1672 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
1673 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1674 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
1675 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
1676 pDst1, pDst2, uSrc1, uSrc2);
1677 IEM_MC_ADVANCE_RIP();
1678 IEM_MC_END();
1679 }
1680 else
1681 {
1682 IEM_MC_BEGIN(4, 1);
1683 IEM_MC_ARG(uint32_t *, pDst1, 0);
1684 IEM_MC_ARG(uint32_t *, pDst2, 1);
1685 IEM_MC_ARG(uint32_t, uSrc1, 2);
1686 IEM_MC_ARG(uint32_t, uSrc2, 3);
1687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1689 IEMOP_HLP_DONE_VEX_DECODING_L0();
1690 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1691 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
1692 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1693 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
1694 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
1695 pDst1, pDst2, uSrc1, uSrc2);
1696 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
1697 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
1698 IEM_MC_ADVANCE_RIP();
1699 IEM_MC_END();
1700 }
1701 }
1702 return VINF_SUCCESS;
1703}
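
/*
 * Reference model for the 32-bit MULX operation (an illustrative sketch
 * only; iemRefMulxU32 is a made-up name and the real iemAImpl_mulx_u32
 * worker is implemented elsewhere).  MULX multiplies the implicit EDX/RDX
 * operand by the r/m operand and returns the full product without touching
 * EFLAGS: the high half goes to the ModRM.reg destination (pDst1 above) and
 * the low half to the VEX.vvvv destination (pDst2 above).
 */
static void iemRefMulxU32(uint32_t *puDstHi, uint32_t *puDstLo, uint32_t uSrc1, uint32_t uSrc2)
{
    uint64_t const uProduct = (uint64_t)uSrc1 * uSrc2;
    *puDstLo = (uint32_t)uProduct;           /* Low half first: if both destinations name the same
                                                register, the high half written below wins (SDM note). */
    *puDstHi = (uint32_t)(uProduct >> 32);
}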
1704
1705
1706/** Opcode VEX.0F38 0xf7 (vex only). */
1707FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
1708{
1709 IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1710 IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
1711}
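
/*
 * Reference model of the BEXTR bit-field extract (an illustrative sketch
 * only; iemRefBextrU64 is a made-up name and the real iemAImpl_bextr_u64
 * worker, which also updates EFLAGS, is implemented elsewhere).  The second
 * source supplies the control word: bits 7:0 give the start position and
 * bits 15:8 the field length; the extracted field is zero extended.
 */
static uint64_t iemRefBextrU64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint8_t const  iFirstBit = (uint8_t)uSrc2;
    uint8_t const  cBits     = (uint8_t)(uSrc2 >> 8);
    uint64_t       uResult   = iFirstBit < 64 ? uSrc1 >> iFirstBit : 0;
    if (cBits < 64)
        uResult &= (UINT64_C(1) << cBits) - 1;
    /* EFLAGS (not modelled here): ZF reflects the result, CF and OF are
       cleared, while SF/AF/PF are left undefined. */
    return uResult;
}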
1712
1713
1714/** Opcode VEX.66.0F38 0xf7 (vex only). */
1715FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
1716{
1717 IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1718 IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2, 0);
1719}
1720
1721
1722/** Opcode VEX.F3.0F38 0xf7 (vex only). */
1723FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
1724{
1725 IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1726 IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2, 0);
1727}
1728
1729
1730/** Opcode VEX.F2.0F38 0xf7 (vex only). */
1731FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
1732{
1733 IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1734 IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2, 0);
1735}
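
/*
 * Reference models for the three BMI2 shifts (illustrative sketches only;
 * the iemRefS*xU64 names are made up and the real iemAImpl_shlx/sarx/shrx
 * workers are implemented elsewhere).  The count comes from the low bits of
 * the second source (masked to 6 bits for 64-bit and 5 bits for 32-bit
 * operands) and EFLAGS is left untouched.  The sarx variant assumes the
 * compiler implements signed right shifts arithmetically.
 */
static uint64_t iemRefShlxU64(uint64_t uSrc1, uint64_t uSrc2) { return uSrc1 << (uSrc2 & 63); }
static uint64_t iemRefShrxU64(uint64_t uSrc1, uint64_t uSrc2) { return uSrc1 >> (uSrc2 & 63); }
static uint64_t iemRefSarxU64(uint64_t uSrc1, uint64_t uSrc2) { return (uint64_t)((int64_t)uSrc1 >> (uSrc2 & 63)); }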
1736
1737/* Opcode VEX.0F38 0xf8 - invalid. */
1738/* Opcode VEX.66.0F38 0xf8 - invalid. */
1739/* Opcode VEX.F3.0F38 0xf8 - invalid. */
1740/* Opcode VEX.F2.0F38 0xf8 - invalid. */
1741
1742/* Opcode VEX.0F38 0xf9 - invalid. */
1743/* Opcode VEX.66.0F38 0xf9 - invalid. */
1744/* Opcode VEX.F3.0F38 0xf9 - invalid. */
1745/* Opcode VEX.F2.0F38 0xf9 - invalid. */
1746
1747/* Opcode VEX.0F38 0xfa - invalid. */
1748/* Opcode VEX.66.0F38 0xfa - invalid. */
1749/* Opcode VEX.F3.0F38 0xfa - invalid. */
1750/* Opcode VEX.F2.0F38 0xfa - invalid. */
1751
1752/* Opcode VEX.0F38 0xfb - invalid. */
1753/* Opcode VEX.66.0F38 0xfb - invalid. */
1754/* Opcode VEX.F3.0F38 0xfb - invalid. */
1755/* Opcode VEX.F2.0F38 0xfb - invalid. */
1756
1757/* Opcode VEX.0F38 0xfc - invalid. */
1758/* Opcode VEX.66.0F38 0xfc - invalid. */
1759/* Opcode VEX.F3.0F38 0xfc - invalid. */
1760/* Opcode VEX.F2.0F38 0xfc - invalid. */
1761
1762/* Opcode VEX.0F38 0xfd - invalid. */
1763/* Opcode VEX.66.0F38 0xfd - invalid. */
1764/* Opcode VEX.F3.0F38 0xfd - invalid. */
1765/* Opcode VEX.F2.0F38 0xfd - invalid. */
1766
1767/* Opcode VEX.0F38 0xfe - invalid. */
1768/* Opcode VEX.66.0F38 0xfe - invalid. */
1769/* Opcode VEX.F3.0F38 0xfe - invalid. */
1770/* Opcode VEX.F2.0F38 0xfe - invalid. */
1771
1772/* Opcode VEX.0F38 0xff - invalid. */
1773/* Opcode VEX.66.0F38 0xff - invalid. */
1774/* Opcode VEX.F3.0F38 0xff - invalid. */
1775/* Opcode VEX.F2.0F38 0xff - invalid. */
1776
1777
1778/**
1779 * VEX opcode map \#2.
1780 *
1781 * @sa g_apfnThreeByte0f38
1782 */
1783IEM_STATIC const PFNIEMOP g_apfnVexMap2[] =
1784{
1785 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1786 /* 0x00 */ iemOp_InvalidNeedRM, iemOp_vpshufb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1787 /* 0x01 */ iemOp_InvalidNeedRM, iemOp_vphaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1788 /* 0x02 */ iemOp_InvalidNeedRM, iemOp_vphaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1789 /* 0x03 */ iemOp_InvalidNeedRM, iemOp_vphaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1790 /* 0x04 */ iemOp_InvalidNeedRM, iemOp_vpmaddubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1791 /* 0x05 */ iemOp_InvalidNeedRM, iemOp_vphsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1792 /* 0x06 */ iemOp_InvalidNeedRM, iemOp_vphsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1793 /* 0x07 */ iemOp_InvalidNeedRM, iemOp_vphsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1794 /* 0x08 */ iemOp_InvalidNeedRM, iemOp_vpsignb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1795 /* 0x09 */ iemOp_InvalidNeedRM, iemOp_vpsignw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1796 /* 0x0a */ iemOp_InvalidNeedRM, iemOp_vpsignd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1797 /* 0x0b */ iemOp_InvalidNeedRM, iemOp_vpmulhrsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1798 /* 0x0c */ iemOp_InvalidNeedRM, iemOp_vpermilps_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1799 /* 0x0d */ iemOp_InvalidNeedRM, iemOp_vpermilpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1800 /* 0x0e */ iemOp_InvalidNeedRM, iemOp_vtestps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1801 /* 0x0f */ iemOp_InvalidNeedRM, iemOp_vtestpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1802
1803 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRM),
1804 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
1805 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
1806 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
1807 /* 0x14 */ IEMOP_X4(iemOp_InvalidNeedRM),
1808 /* 0x15 */ IEMOP_X4(iemOp_InvalidNeedRM),
1809 /* 0x16 */ iemOp_InvalidNeedRM, iemOp_vpermps_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1810 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_vptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1811 /* 0x18 */ iemOp_InvalidNeedRM, iemOp_vbroadcastss_Vx_Wd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1812 /* 0x19 */ iemOp_InvalidNeedRM, iemOp_vbroadcastsd_Vqq_Wq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1813 /* 0x1a */ iemOp_InvalidNeedRM, iemOp_vbroadcastf128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1814 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
1815 /* 0x1c */ iemOp_InvalidNeedRM, iemOp_vpabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1816 /* 0x1d */ iemOp_InvalidNeedRM, iemOp_vpabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1817 /* 0x1e */ iemOp_InvalidNeedRM, iemOp_vpabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1818 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
1819
1820 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1821 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1822 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1823 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1824 /* 0x24 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1825 /* 0x25 */ iemOp_InvalidNeedRM, iemOp_vpmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1826 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
1827 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
1828 /* 0x28 */ iemOp_InvalidNeedRM, iemOp_vpmuldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1829 /* 0x29 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1830 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_vmovntdqa_Vx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1831 /* 0x2b */ iemOp_InvalidNeedRM, iemOp_vpackusdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1832 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1833 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1834 /* 0x2e */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1835 /* 0x2f */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1836
1837 /* 0x30 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1838 /* 0x31 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1839 /* 0x32 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1840 /* 0x33 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1841 /* 0x34 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1842 /* 0x35 */ iemOp_InvalidNeedRM, iemOp_vpmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1843 /* 0x36 */ iemOp_InvalidNeedRM, iemOp_vpermd_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1844 /* 0x37 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1845 /* 0x38 */ iemOp_InvalidNeedRM, iemOp_vpminsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1846 /* 0x39 */ iemOp_InvalidNeedRM, iemOp_vpminsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1847 /* 0x3a */ iemOp_InvalidNeedRM, iemOp_vpminuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1848 /* 0x3b */ iemOp_InvalidNeedRM, iemOp_vpminud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1849 /* 0x3c */ iemOp_InvalidNeedRM, iemOp_vpmaxsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1850 /* 0x3d */ iemOp_InvalidNeedRM, iemOp_vpmaxsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1851 /* 0x3e */ iemOp_InvalidNeedRM, iemOp_vpmaxuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1852 /* 0x3f */ iemOp_InvalidNeedRM, iemOp_vpmaxud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1853
1854 /* 0x40 */ iemOp_InvalidNeedRM, iemOp_vpmulld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1855 /* 0x41 */ iemOp_InvalidNeedRM, iemOp_vphminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1856 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
1857 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
1858 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
1859 /* 0x45 */ iemOp_InvalidNeedRM, iemOp_vpsrlvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1860 /* 0x46 */ iemOp_InvalidNeedRM, iemOp_vsravd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1861 /* 0x47 */ iemOp_InvalidNeedRM, iemOp_vpsllvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1862 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
1863 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
1864 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
1865 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
1866 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
1867 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
1868 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
1869 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
1870
1871 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
1872 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
1873 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
1874 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
1875 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
1876 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
1877 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
1878 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
1879 /* 0x58 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1880 /* 0x59 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1881 /* 0x5a */ iemOp_InvalidNeedRM, iemOp_vbroadcasti128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1882 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
1883 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
1884 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
1885 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
1886 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),
1887
1888 /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
1889 /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
1890 /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
1891 /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
1892 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
1893 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
1894 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
1895 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
1896 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
1897 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
1898 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
1899 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
1900 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
1901 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
1902 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
1903 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),
1904
1905 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
1906 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
1907 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
1908 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
1909 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
1910 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
1911 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
1912 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
1913 /* 0x78 */ iemOp_InvalidNeedRM, iemOp_vpboardcastb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1914 /* 0x79 */ iemOp_InvalidNeedRM, iemOp_vpboardcastw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1915 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
1916 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
1917 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
1918 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
1919 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
1920 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),
1921
1922 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
1923 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
1924 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
1925 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
1926 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
1927 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
1928 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
1929 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
1930 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
1931 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
1932 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
1933 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
1934 /* 0x8c */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1935 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
1936 /* 0x8e */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Mx_Vx_Hx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1937 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
1938
1939 /* 0x90 */ iemOp_InvalidNeedRM, iemOp_vgatherdd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1940 /* 0x91 */ iemOp_InvalidNeedRM, iemOp_vgatherqd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1941 /* 0x92 */ iemOp_InvalidNeedRM, iemOp_vgatherdps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1942 /* 0x93 */ iemOp_InvalidNeedRM, iemOp_vgatherqps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1943 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
1944 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
1945 /* 0x96 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub132ps_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1946 /* 0x97 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1947 /* 0x98 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1948 /* 0x99 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1949 /* 0x9a */ iemOp_InvalidNeedRM, iemOp_vfmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1950 /* 0x9b */ iemOp_InvalidNeedRM, iemOp_vfmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1951 /* 0x9c */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1952 /* 0x9d */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1953 /* 0x9e */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1954 /* 0x9f */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1955
1956 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1957 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1958 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1959 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1960 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1961 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1962 /* 0xa6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1963 /* 0xa7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1964 /* 0xa8 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1965 /* 0xa9 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1966 /* 0xaa */ iemOp_InvalidNeedRM, iemOp_vfmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1967 /* 0xab */ iemOp_InvalidNeedRM, iemOp_vfmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1968 /* 0xac */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1969 /* 0xad */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1970 /* 0xae */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1971 /* 0xaf */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1972
1973 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1974 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1975 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1976 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1977 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1978 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1979 /* 0xb6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1980 /* 0xb7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1981 /* 0xb8 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1982 /* 0xb9 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1983 /* 0xba */ iemOp_InvalidNeedRM, iemOp_vfmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1984 /* 0xbb */ iemOp_InvalidNeedRM, iemOp_vfmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1985 /* 0xbc */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1986 /* 0xbd */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1987 /* 0xbe */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1988 /* 0xbf */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1989
1990 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1991 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1992 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1993 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1994 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1995 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1996 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1997 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1998 /* 0xc8 */ iemOp_vsha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1999 /* 0xc9 */ iemOp_vsha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2000 /* 0xca */ iemOp_vsha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2001 /* 0xcb */ iemOp_vsha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2002 /* 0xcc */ iemOp_vsha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2003 /* 0xcd */ iemOp_vsha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2004 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
2005 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
2006
2007 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2008 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2009 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2010 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2011 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2012 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2013 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
2014 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
2015 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2016 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2017 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
2018 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vaesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2019 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vaesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2020 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vaesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2021 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vaesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2022 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vaesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2023
2024 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2025 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2026 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2027 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2028 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2029 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2030 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
2031 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
2032 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2033 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2034 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
2035 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
2036 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
2037 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
2038 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
2039 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),
2040
2041 /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2042 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2043 /* 0xf2 */ iemOp_andn_Gy_By_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2044 /* 0xf3 */ iemOp_VGrp17_f3, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2045 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2046 /* 0xf5 */ iemOp_bzhi_Gy_Ey_By, iemOp_InvalidNeedRM, iemOp_pext_Gy_By_Ey, iemOp_pdep_Gy_By_Ey,
2047 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_mulx_By_Gy_rDX_Ey,
2048 /* 0xf7 */ iemOp_bextr_Gy_Ey_By, iemOp_shlx_Gy_Ey_By, iemOp_sarx_Gy_Ey_By, iemOp_shrx_Gy_Ey_By,
2049 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2050 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2051 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
2052 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
2053 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
2054 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
2055 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
2056 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
2057};
2058AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
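
/*
 * Conceptual sketch of how a table with this layout is indexed (illustrative
 * only; iemRefLookupVexMap2, bOpcode and idxPrefix are made-up names and the
 * actual dispatch lives in the IEM decoder).  There are four entries per
 * opcode byte, one per mandatory-prefix column (none, 66h, F3h, F2h), which
 * is why the AssertCompile above checks for 256 * 4 = 1024 entries.
 */
static PFNIEMOP iemRefLookupVexMap2(uint8_t bOpcode, uint8_t idxPrefix)
{
    return g_apfnVexMap2[(uintptr_t)bOpcode * 4 + idxPrefix];
}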
2059
2060/** @} */
2061