VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h

Last change on this file: r104272, checked in by vboxsync (2024-04-10)

VMM: bugref:9898 VEX.W opcode decoding helper and its usage for vtestp[s|d] instructions.

/* $Id: IEMAllInstVexMap2.cpp.h 104272 2024-04-10 10:44:58Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstThree0f38.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 2
 * @{
 */

/* Opcode VEX.0F38 0x00 - invalid. */


/** Opcode VEX.66.0F38 0x00. */
FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpshufb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
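
/**
 * Editor's reference sketch, not part of the original sources: the per-byte
 * rule the vpshufb workers dispatched above implement for one 128-bit lane
 * (the 256-bit form applies it to each lane separately). The helper name and
 * plain-array types are made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchPshufbLane(uint8_t abDst[16], uint8_t const abSrc1[16], uint8_t const abSelect[16])
{
    for (unsigned i = 0; i < 16; i++)
        if (abSelect[i] & 0x80)
            abDst[i] = 0;                            /* high selector bit set: zero the byte */
        else
            abDst[i] = abSrc1[abSelect[i] & 15];     /* otherwise pick a byte from the same lane */
}
#endif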


/* Opcode VEX.0F38 0x01 - invalid. */


/** Opcode VEX.66.0F38 0x01. */
FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x02 - invalid. */


/** Opcode VEX.66.0F38 0x02. */
FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x03 - invalid. */


/** Opcode VEX.66.0F38 0x03. */
FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x04 - invalid. */


/** Opcode VEX.66.0F38 0x04. */
FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
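
/**
 * Editor's reference sketch, not part of the original sources: vpmaddubsw
 * multiplies the unsigned bytes of the first source with the corresponding
 * signed bytes of the second, adds adjacent products and saturates the sums
 * to signed words. Helper names are made up for illustration.
 */
#if 0 /* illustrative only */
static int16_t sketchSatS16(int32_t i32) { return i32 > 32767 ? 32767 : i32 < -32768 ? -32768 : (int16_t)i32; }

static void sketchPmaddubswLane(int16_t ai16Dst[8], uint8_t const abSrc1[16], int8_t const ai8Src2[16])
{
    for (unsigned i = 0; i < 8; i++)
        ai16Dst[i] = sketchSatS16(  (int32_t)abSrc1[2 * i]     * ai8Src2[2 * i]
                                  + (int32_t)abSrc1[2 * i + 1] * ai8Src2[2 * i + 1]);
}
#endif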


/* Opcode VEX.0F38 0x05 - invalid. */


/** Opcode VEX.66.0F38 0x05. */
FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x06 - invalid. */


/** Opcode VEX.66.0F38 0x06. */
FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x07 - invalid. */


/** Opcode VEX.66.0F38 0x07. */
FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x08 - invalid. */


/** Opcode VEX.66.0F38 0x08. */
FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
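
/**
 * Editor's reference sketch, not part of the original sources: the per-byte
 * rule behind vpsignb - negate, zero or keep each element of the first source
 * depending on the sign of the second (vpsignw/vpsignd apply the same rule to
 * words/dwords). The helper name is made up for illustration; note that
 * negating -128 wraps back to -128, matching the hardware's two's-complement
 * behaviour.
 */
#if 0 /* illustrative only */
static void sketchPsignbLane(int8_t ai8Dst[16], int8_t const ai8Src1[16], int8_t const ai8Src2[16])
{
    for (unsigned i = 0; i < 16; i++)
        ai8Dst[i] = ai8Src2[i] < 0  ? (int8_t)-ai8Src1[i]   /* negative selector: negate (wraps for -128) */
                  : ai8Src2[i] == 0 ? 0                     /* zero selector: zero */
                  :                   ai8Src1[i];           /* positive selector: pass through */
}
#endif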


/* Opcode VEX.0F38 0x09 - invalid. */


/** Opcode VEX.66.0F38 0x09. */
FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0a - invalid. */


/** Opcode VEX.66.0F38 0x0a. */
FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0b - invalid. */


/** Opcode VEX.66.0F38 0x0b. */
FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
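
/**
 * Editor's reference sketch, not part of the original sources: the rounding
 * high multiply vpmulhrsw computes per signed 16-bit element,
 * i.e. dst = (((src1 * src2) >> 14) + 1) >> 1. Helper name made up for
 * illustration.
 */
#if 0 /* illustrative only */
static int16_t sketchPmulhrsw(int16_t i16Src1, int16_t i16Src2)
{
    int32_t const i32Tmp = (((int32_t)i16Src1 * i16Src2) >> 14) + 1;  /* round bit added below the result */
    return (int16_t)(i32Tmp >> 1);                                    /* keep bits 16:1 of the product */
}
#endif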


/* Opcode VEX.0F38 0x0c - invalid. */


/** Opcode VEX.66.0F38 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPS, vpermilps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
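
/**
 * Editor's reference sketch, not part of the original sources: the variable
 * form of vpermilps selects each destination single from the same 128-bit
 * lane of the first source, using the low two bits of the corresponding
 * control dword in the second source. Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchVpermilpsLane(uint32_t au32Dst[4], uint32_t const au32Src[4], uint32_t const au32Ctrl[4])
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Src[au32Ctrl[i] & 3];  /* only bits 1:0 of each control dword are used */
}
#endif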


/* Opcode VEX.0F38 0x0d - invalid. */


/** Opcode VEX.66.0F38 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPD, vpermilpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vtestps/d xmm1, xmm2/mem128
 *     - vtestps/d ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
#define IEMOP_BODY_VTESTP_S_D(a_Instr) \
    Assert(pVCpu->iem.s.uVexLength <= 1); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT128U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
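
/**
 * Editor's reference sketch, not part of the original sources: the EFLAGS
 * result the vtestps workers dispatched by the body above produce from the
 * sign bits of the packed singles - ZF when no sign bit is set in
 * (src1 & src2), CF when none is set in (src2 & ~src1); PF, AF, SF and OF are
 * cleared, matching the @opflclear tags below. vtestpd does the same on the
 * 64-bit element sign bits. Helper name made up for illustration; X86_EFL_*
 * are VBox's EFLAGS bit masks.
 */
#if 0 /* illustrative only */
static void sketchVtestPs128(uint32_t const au32Src1[4], uint32_t const au32Src2[4], uint32_t *pfEFlags)
{
    uint32_t fAnd  = 0;
    uint32_t fAndN = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        fAnd  |=  au32Src1[i] & au32Src2[i] & UINT32_C(0x80000000);
        fAndN |= ~au32Src1[i] & au32Src2[i] & UINT32_C(0x80000000);
    }
    uint32_t fEFlags = *pfEFlags & ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (!fAnd)
        fEFlags |= X86_EFL_ZF;
    if (!fAndN)
        fEFlags |= X86_EFL_CF;
    *pfEFlags = fEFlags;
}
#endif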


/* Opcode VEX.0F38 0x0e - invalid. */


/**
 * @opcode 0x0e
 * @oppfx 0x66
 * @opflmodify cf,zf,pf,af,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestps_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     *        CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPS, vtestps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestps);
}


/* Opcode VEX.0F38 0x0f - invalid. */


/**
 * @opcode 0x0f
 * @oppfx 0x66
 * @opflmodify cf,zf,pf,af,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestpd_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     *        CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPD, vtestpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestpd);
}


/* Opcode VEX.0F38 0x10 - invalid */
/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
/* Opcode VEX.0F38 0x11 - invalid */
/* Opcode VEX.66.0F38 0x11 - invalid */
/* Opcode VEX.0F38 0x12 - invalid */
/* Opcode VEX.66.0F38 0x12 - invalid */
/* Opcode VEX.0F38 0x13 - invalid */
/* Opcode VEX.66.0F38 0x13 (vex only). */
FNIEMOP_STUB(iemOp_vcvtph2ps_Vx_Wx);
/* Opcode VEX.0F38 0x14 - invalid */
/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
/* Opcode VEX.0F38 0x15 - invalid */
/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
/* Opcode VEX.0F38 0x16 - invalid */
/** Opcode VEX.66.0F38 0x16. */
FNIEMOP_STUB(iemOp_vpermps_Vqq_Hqq_Wqq);
/* Opcode VEX.0F38 0x17 - invalid */


/**
 * @opcode 0x17
 * @oppfx 0x66
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
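
/**
 * Editor's reference sketch, not part of the original sources: unlike
 * vtestps/d above, vptest tests every bit - ZF <- (src1 & src2) == 0,
 * CF <- (src2 & ~src1) == 0, with PF, AF, SF and OF cleared. Helper name
 * made up for illustration; X86_EFL_* are VBox's EFLAGS bit masks.
 */
#if 0 /* illustrative only */
static void sketchPtest128(uint64_t const au64Src1[2], uint64_t const au64Src2[2], uint32_t *pfEFlags)
{
    uint64_t const fAnd  = ( au64Src1[0] & au64Src2[0]) | ( au64Src1[1] & au64Src2[1]);
    uint64_t const fAndN = (~au64Src1[0] & au64Src2[0]) | (~au64Src1[1] & au64Src2[1]);
    uint32_t fEFlags = *pfEFlags & ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (!fAnd)
        fEFlags |= X86_EFL_ZF;
    if (!fAndN)
        fEFlags |= X86_EFL_CF;
    *pfEFlags = fEFlags;
}
#endif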


/* Opcode VEX.0F38 0x18 - invalid */


/** Opcode VEX.66.0F38 0x18. */
FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
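
/**
 * Editor's reference sketch, not part of the original sources: the effect of
 * the IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX operation used above - one dword
 * replicated into every element up to the vector length, everything above
 * zeroed. Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchBroadcastU32(uint32_t au32Dst[8], uint32_t uSrc, bool f256)
{
    unsigned const cElems = f256 ? 8 : 4;
    for (unsigned i = 0; i < 8; i++)
        au32Dst[i] = i < cElems ? uSrc : 0;  /* zero-extend beyond the active vector length */
}
#endif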


/* Opcode VEX.0F38 0x19 - invalid */


/** Opcode VEX.66.0F38 0x19. */
FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1a - invalid */


/** Opcode VEX.66.0F38 0x1a. */
FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * No register, register.
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1b - invalid */
/* Opcode VEX.66.0F38 0x1b - invalid */
/* Opcode VEX.0F38 0x1c - invalid. */


/** Opcode VEX.66.0F38 0x1c. */
FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1d - invalid. */


/** Opcode VEX.66.0F38 0x1d. */
FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.0F38 0x1e - invalid. */


/** Opcode VEX.66.0F38 0x1e. */
FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1f - invalid */
/* Opcode VEX.66.0F38 0x1f - invalid */


/** Body for the vpmov{s,z}x* instructions. */
#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth, a_VexLengthMemFetch) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_XREG_U ## a_SrcWidth (uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT128U, uSrc); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            a_VexLengthMemFetch(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
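
/**
 * Editor's reference sketch, not part of the original sources: the widening
 * performed by the vpmovsx* workers the body above dispatches, shown here for
 * vpmovsxbw on one 128-bit destination (8 bytes in, 8 sign-extended words
 * out); the vpmovzx* forms zero-extend instead. Helper name made up for
 * illustration.
 */
#if 0 /* illustrative only */
static void sketchPmovsxbw128(int16_t ai16Dst[8], int8_t const ai8Src[8])
{
    for (unsigned i = 0; i < 8; i++)
        ai16Dst[i] = ai8Src[i];  /* implicit sign extension by the integer conversion */
}
#endif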

/** Opcode VEX.66.0F38 0x20. */
FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x21. */
FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x22. */
FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x23. */
FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x24. */
FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x25. */
FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/* Opcode VEX.66.0F38 0x26 - invalid */
/* Opcode VEX.66.0F38 0x27 - invalid */


/** Opcode VEX.66.0F38 0x28. */
FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
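
/**
 * Editor's reference sketch, not part of the original sources: vpmuldq takes
 * the signed low dword of each qword lane and produces full 64-bit products
 * (elements 0 and 2 of the dword view). Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchPmuldq128(int64_t ai64Dst[2], uint32_t const au32Src1[4], uint32_t const au32Src2[4])
{
    ai64Dst[0] = (int64_t)(int32_t)au32Src1[0] * (int32_t)au32Src2[0];  /* low qword lane */
    ai64Dst[1] = (int64_t)(int32_t)au32Src1[2] * (int32_t)au32Src2[2];  /* high qword lane */
}
#endif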


/** Opcode VEX.66.0F38 0x29. */
FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpcmpeqq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /**
             * @opcode 0x2a
             * @opcodesub !11 mr/reg vex.l=0
             * @oppfx 0x66
             * @opcpuid avx
             * @opgroup og_avx_cachect
             * @opxcpttype 1
             * @optest op1=-1 op2=2 -> op1=2
             * @optest op1=0 op2=-42 -> op1=-42
             */
            /* 128-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /**
             * @opdone
             * @opcode 0x2a
             * @opcodesub !11 mr/reg vex.l=1
             * @oppfx 0x66
             * @opcpuid avx2
             * @opgroup og_avx2_cachect
             * @opxcpttype 1
             * @optest op1=-1 op2=2 -> op1=2
             * @optest op1=0 op2=-42 -> op1=-42
             */
            /* 256-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }

    /**
     * @opdone
     * @opmnemonic udvex660f382arg
     * @opcode 0x2a
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
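
/**
 * Editor's reference sketch, not part of the original sources: the alignment
 * rule the IEM_MC_FETCH_MEM_U128_ALIGN_SSE / _U256_ALIGN_AVX fetches above
 * enforce - vmovntdqa requires its memory operand to be naturally aligned to
 * the vector width. Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static bool sketchIsVmovntdqaSrcAligned(uint64_t GCPtrMem, bool f256)
{
    uint64_t const fAlignMask = f256 ? 31 : 15;  /* 32-byte alignment for ymm, 16-byte for xmm */
    return (GCPtrMem & fAlignMask) == 0;
}
#endif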


/** Opcode VEX.66.0F38 0x2b. */
FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpackusdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_STUB(iemOp_vmaskmovps_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_STUB(iemOp_vmaskmovps_Mx_Hx_Vx);
/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Mx_Hx_Vx);


/** Opcode VEX.66.0F38 0x30. */
FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x31. */
FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x32. */
FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x33. */
FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x34. */
FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x35. */
FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/* Opcode VEX.66.0F38 0x36. */
FNIEMOP_STUB(iemOp_vpermd_Vqq_Hqq_Wqq);


/** Opcode VEX.66.0F38 0x37. */
FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpcmpgtq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x38. */
FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpminsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x39. */
FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpminsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3a. */
FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpminuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3b. */
FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpminud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3c. */
FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3d. */
FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3e. */
FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaxuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3f. */
FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaxud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x40. */
FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x41. */
FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
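
/**
 * Editor's reference sketch, not part of the original sources: vphminposuw
 * scans the eight unsigned words of the source, returns the minimum in word 0
 * and its index in word 1 (the lowest index wins on ties), and zeroes the
 * rest of the destination; the VEX form also clears bits 255:128, hence the
 * IEM_MC_CLEAR_YREG_128_UP above. Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchPhminposuw(uint16_t au16Dst[8], uint16_t const au16Src[8])
{
    unsigned iMin = 0;
    for (unsigned i = 1; i < 8; i++)
        if (au16Src[i] < au16Src[iMin])
            iMin = i;
    au16Dst[0] = au16Src[iMin];
    au16Dst[1] = (uint16_t)iMin;
    for (unsigned i = 2; i < 8; i++)
        au16Dst[i] = 0;
}
#endif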


/* Opcode VEX.66.0F38 0x42 - invalid. */
/* Opcode VEX.66.0F38 0x43 - invalid. */
/* Opcode VEX.66.0F38 0x44 - invalid. */


/** Opcode VEX.66.0F38 0x45. */
FNIEMOP_DEF(iemOp_vpsrlvd_q_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSRLVD, vpsrlvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvq);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
    else
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvd);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
}


/** Opcode VEX.66.0F38 0x46. */
FNIEMOP_DEF(iemOp_vpsravd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSRAVD, vpsravd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsravd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x47. */
FNIEMOP_DEF(iemOp_vpsllvd_q_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLVD, vpsllvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsllvq);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
    else
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsllvd);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
}
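
/**
 * Editor's reference sketch, not part of the original sources: the
 * per-element variable shifts behind the vpsrlvd/vpsllvd workers dispatched
 * above (VEX.W selects the qword forms). Counts of 32 or more produce zero
 * for the logical forms; vpsravd instead clamps the count at 31 so the sign
 * bit is replicated. Helper name made up for illustration.
 */
#if 0 /* illustrative only */
static void sketchPsrlvd128(uint32_t au32Dst[4], uint32_t const au32Src[4], uint32_t const au32Cnt[4])
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Cnt[i] < 32 ? au32Src[i] >> au32Cnt[i] : 0;  /* oversized counts shift everything out */
}
#endif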
1222
1223
1224/* Opcode VEX.66.0F38 0x48 - invalid. */
1225/* Opcode VEX.66.0F38 0x49 - invalid. */
1226/* Opcode VEX.66.0F38 0x4a - invalid. */
1227/* Opcode VEX.66.0F38 0x4b - invalid. */
1228/* Opcode VEX.66.0F38 0x4c - invalid. */
1229/* Opcode VEX.66.0F38 0x4d - invalid. */
1230/* Opcode VEX.66.0F38 0x4e - invalid. */
1231/* Opcode VEX.66.0F38 0x4f - invalid. */
1232
1233/* Opcode VEX.66.0F38 0x50 - invalid. */
1234/* Opcode VEX.66.0F38 0x51 - invalid. */
1235/* Opcode VEX.66.0F38 0x52 - invalid. */
1236/* Opcode VEX.66.0F38 0x53 - invalid. */
1237/* Opcode VEX.66.0F38 0x54 - invalid. */
1238/* Opcode VEX.66.0F38 0x55 - invalid. */
1239/* Opcode VEX.66.0F38 0x56 - invalid. */
1240/* Opcode VEX.66.0F38 0x57 - invalid. */
1241
1242
1243/** Opcode VEX.66.0F38 0x58. */
1244FNIEMOP_DEF(iemOp_vpbroadcastd_Vx_Wx)
1245{
1246 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTD, vpbroadcastd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1248 if (IEM_IS_MODRM_REG_MODE(bRm))
1249 {
1250 /*
1251 * Register, register.
1252 */
1253 if (pVCpu->iem.s.uVexLength)
1254 {
1255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1256 IEM_MC_LOCAL(uint32_t, uSrc);
1257
1258 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1260 IEM_MC_PREPARE_AVX_USAGE();
1261
1262 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1263 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1264
1265 IEM_MC_ADVANCE_RIP_AND_FINISH();
1266 IEM_MC_END();
1267 }
1268 else
1269 {
1270 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1271 IEM_MC_LOCAL(uint32_t, uSrc);
1272
1273 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1274 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1275 IEM_MC_PREPARE_AVX_USAGE();
1276 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1277 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1278
1279 IEM_MC_ADVANCE_RIP_AND_FINISH();
1280 IEM_MC_END();
1281 }
1282 }
1283 else
1284 {
1285 /*
1286 * Register, memory.
1287 */
1288 if (pVCpu->iem.s.uVexLength)
1289 {
1290 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1291 IEM_MC_LOCAL(uint32_t, uSrc);
1292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1293
1294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1295 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1296 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1297 IEM_MC_PREPARE_AVX_USAGE();
1298
1299 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1300 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1301
1302 IEM_MC_ADVANCE_RIP_AND_FINISH();
1303 IEM_MC_END();
1304 }
1305 else
1306 {
1307 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1308 IEM_MC_LOCAL(uint32_t, uSrc);
1309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1310
1311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1312 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1313 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1314 IEM_MC_PREPARE_AVX_USAGE();
1315
1316 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1317 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1318
1319 IEM_MC_ADVANCE_RIP_AND_FINISH();
1320 IEM_MC_END();
1321 }
1322 }
1323}
1324
1325
1326/** Opcode VEX.66.0F38 0x59. */
1327FNIEMOP_DEF(iemOp_vpbroadcastq_Vx_Wx)
1328{
1329 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTQ, vpbroadcastq, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1331 if (IEM_IS_MODRM_REG_MODE(bRm))
1332 {
1333 /*
1334 * Register, register.
1335 */
1336 if (pVCpu->iem.s.uVexLength)
1337 {
1338 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1339 IEM_MC_LOCAL(uint64_t, uSrc);
1340
1341 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1342 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1343 IEM_MC_PREPARE_AVX_USAGE();
1344
1345 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1346 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1347
1348 IEM_MC_ADVANCE_RIP_AND_FINISH();
1349 IEM_MC_END();
1350 }
1351 else
1352 {
1353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1354 IEM_MC_LOCAL(uint64_t, uSrc);
1355
1356 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1357 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1358 IEM_MC_PREPARE_AVX_USAGE();
1359 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1360 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 }
1366 else
1367 {
1368 /*
1369 * Register, memory.
1370 */
1371 if (pVCpu->iem.s.uVexLength)
1372 {
1373 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1374 IEM_MC_LOCAL(uint64_t, uSrc);
1375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1376
1377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1378 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1380 IEM_MC_PREPARE_AVX_USAGE();
1381
1382 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1383 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1384
1385 IEM_MC_ADVANCE_RIP_AND_FINISH();
1386 IEM_MC_END();
1387 }
1388 else
1389 {
1390 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1391 IEM_MC_LOCAL(uint64_t, uSrc);
1392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1393
1394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1395 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1396 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1397 IEM_MC_PREPARE_AVX_USAGE();
1398
1399 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1400 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1401
1402 IEM_MC_ADVANCE_RIP_AND_FINISH();
1403 IEM_MC_END();
1404 }
1405 }
1406}
1407
1408
1409/** Opcode VEX.66.0F38 0x5a. */
1410FNIEMOP_DEF(iemOp_vbroadcasti128_Vqq_Mdq)
1411{
1412 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTI128, vbroadcasti128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1414 if (IEM_IS_MODRM_REG_MODE(bRm))
1415 {
1416 /*
1417 * No register, register.
1418 */
1419 IEMOP_RAISE_INVALID_OPCODE_RET();
1420 }
1421 else
1422 {
1423 /*
1424 * Register, memory.
1425 */
1426 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1427 IEM_MC_LOCAL(RTUINT128U, uSrc);
1428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1429
1430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1431 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
1432 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1433 IEM_MC_PREPARE_AVX_USAGE();
1434
1435 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1436 IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1437
1438 IEM_MC_ADVANCE_RIP_AND_FINISH();
1439 IEM_MC_END();
1440 }
1441}
1442
1443
1444/* Opcode VEX.66.0F38 0x5b - invalid. */
1445/* Opcode VEX.66.0F38 0x5c - invalid. */
1446/* Opcode VEX.66.0F38 0x5d - invalid. */
1447/* Opcode VEX.66.0F38 0x5e - invalid. */
1448/* Opcode VEX.66.0F38 0x5f - invalid. */
1449
1450/* Opcode VEX.66.0F38 0x60 - invalid. */
1451/* Opcode VEX.66.0F38 0x61 - invalid. */
1452/* Opcode VEX.66.0F38 0x62 - invalid. */
1453/* Opcode VEX.66.0F38 0x63 - invalid. */
1454/* Opcode VEX.66.0F38 0x64 - invalid. */
1455/* Opcode VEX.66.0F38 0x65 - invalid. */
1456/* Opcode VEX.66.0F38 0x66 - invalid. */
1457/* Opcode VEX.66.0F38 0x67 - invalid. */
1458/* Opcode VEX.66.0F38 0x68 - invalid. */
1459/* Opcode VEX.66.0F38 0x69 - invalid. */
1460/* Opcode VEX.66.0F38 0x6a - invalid. */
1461/* Opcode VEX.66.0F38 0x6b - invalid. */
1462/* Opcode VEX.66.0F38 0x6c - invalid. */
1463/* Opcode VEX.66.0F38 0x6d - invalid. */
1464/* Opcode VEX.66.0F38 0x6e - invalid. */
1465/* Opcode VEX.66.0F38 0x6f - invalid. */
1466
1467/* Opcode VEX.66.0F38 0x70 - invalid. */
1468/* Opcode VEX.66.0F38 0x71 - invalid. */
1469/* Opcode VEX.66.0F38 0x72 - invalid. */
1470/* Opcode VEX.66.0F38 0x73 - invalid. */
1471/* Opcode VEX.66.0F38 0x74 - invalid. */
1472/* Opcode VEX.66.0F38 0x75 - invalid. */
1473/* Opcode VEX.66.0F38 0x76 - invalid. */
1474/* Opcode VEX.66.0F38 0x77 - invalid. */
1475
1476
1477/** Opcode VEX.66.0F38 0x78. */
1478FNIEMOP_DEF(iemOp_vpbroadcastb_Vx_Wx)
1479{
1480 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTB, vpbroadcastb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1482 if (IEM_IS_MODRM_REG_MODE(bRm))
1483 {
1484 /*
1485 * Register, register.
1486 */
1487 if (pVCpu->iem.s.uVexLength)
1488 {
1489 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1490 IEM_MC_LOCAL(uint8_t, uSrc);
1491
1492 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1493 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1494 IEM_MC_PREPARE_AVX_USAGE();
1495
1496 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1497 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1498
1499 IEM_MC_ADVANCE_RIP_AND_FINISH();
1500 IEM_MC_END();
1501 }
1502 else
1503 {
1504 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1505 IEM_MC_LOCAL(uint8_t, uSrc);
1506
1507 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1508 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1509 IEM_MC_PREPARE_AVX_USAGE();
1510 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1511 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1512
1513 IEM_MC_ADVANCE_RIP_AND_FINISH();
1514 IEM_MC_END();
1515 }
1516 }
1517 else
1518 {
1519 /*
1520 * Register, memory.
1521 */
1522 if (pVCpu->iem.s.uVexLength)
1523 {
1524 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1525 IEM_MC_LOCAL(uint8_t, uSrc);
1526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1527
1528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1529 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1531 IEM_MC_PREPARE_AVX_USAGE();
1532
1533 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1534 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1535
1536 IEM_MC_ADVANCE_RIP_AND_FINISH();
1537 IEM_MC_END();
1538 }
1539 else
1540 {
1541 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1542 IEM_MC_LOCAL(uint8_t, uSrc);
1543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1544
1545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1548 IEM_MC_PREPARE_AVX_USAGE();
1549
1550 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1551 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1552
1553 IEM_MC_ADVANCE_RIP_AND_FINISH();
1554 IEM_MC_END();
1555 }
1556 }
1557}
1558
1559
1560/** Opcode VEX.66.0F38 0x79. */
1561FNIEMOP_DEF(iemOp_vpbroadcastw_Vx_Wx)
1562{
1563 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTW, vpbroadcastw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_REG_MODE(bRm))
1566 {
1567 /*
1568 * Register, register.
1569 */
1570 if (pVCpu->iem.s.uVexLength)
1571 {
1572 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1573 IEM_MC_LOCAL(uint16_t, uSrc);
1574
1575 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1576 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1577 IEM_MC_PREPARE_AVX_USAGE();
1578
1579 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1580 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1581
1582 IEM_MC_ADVANCE_RIP_AND_FINISH();
1583 IEM_MC_END();
1584 }
1585 else
1586 {
1587 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1588 IEM_MC_LOCAL(uint16_t, uSrc);
1589
1590 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1591 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1592 IEM_MC_PREPARE_AVX_USAGE();
1593 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1594 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1595
1596 IEM_MC_ADVANCE_RIP_AND_FINISH();
1597 IEM_MC_END();
1598 }
1599 }
1600 else
1601 {
1602 /*
1603 * Register, memory.
1604 */
1605 if (pVCpu->iem.s.uVexLength)
1606 {
1607 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1608 IEM_MC_LOCAL(uint16_t, uSrc);
1609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1610
1611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1612 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1613 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1614 IEM_MC_PREPARE_AVX_USAGE();
1615
1616 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1617 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1618
1619 IEM_MC_ADVANCE_RIP_AND_FINISH();
1620 IEM_MC_END();
1621 }
1622 else
1623 {
1624 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1625 IEM_MC_LOCAL(uint16_t, uSrc);
1626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1630 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1631 IEM_MC_PREPARE_AVX_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1635
1636 IEM_MC_ADVANCE_RIP_AND_FINISH();
1637 IEM_MC_END();
1638 }
1639 }
1640}
1641
1642
1643/* Opcode VEX.66.0F38 0x7a - invalid. */
1644/* Opcode VEX.66.0F38 0x7b - invalid. */
1645/* Opcode VEX.66.0F38 0x7c - invalid. */
1646/* Opcode VEX.66.0F38 0x7d - invalid. */
1647/* Opcode VEX.66.0F38 0x7e - invalid. */
1648/* Opcode VEX.66.0F38 0x7f - invalid. */
1649
1650/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
1651/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
1652/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
1653/* Opcode VEX.66.0F38 0x83 - invalid. */
1654/* Opcode VEX.66.0F38 0x84 - invalid. */
1655/* Opcode VEX.66.0F38 0x85 - invalid. */
1656/* Opcode VEX.66.0F38 0x86 - invalid. */
1657/* Opcode VEX.66.0F38 0x87 - invalid. */
1658/* Opcode VEX.66.0F38 0x88 - invalid. */
1659/* Opcode VEX.66.0F38 0x89 - invalid. */
1660/* Opcode VEX.66.0F38 0x8a - invalid. */
1661/* Opcode VEX.66.0F38 0x8b - invalid. */
1662/** Opcode VEX.66.0F38 0x8c. */
1663FNIEMOP_STUB(iemOp_vpmaskmovd_q_Vx_Hx_Mx);
1664/* Opcode VEX.66.0F38 0x8d - invalid. */
1665/** Opcode VEX.66.0F38 0x8e. */
1666FNIEMOP_STUB(iemOp_vpmaskmovd_q_Mx_Vx_Hx);
1667/* Opcode VEX.66.0F38 0x8f - invalid. */
1668
1669/** Opcode VEX.66.0F38 0x90 (vex only). */
1670FNIEMOP_STUB(iemOp_vpgatherdd_q_Vx_Hx_Wx);
1671/** Opcode VEX.66.0F38 0x91 (vex only). */
1672FNIEMOP_STUB(iemOp_vpgatherqd_q_Vx_Hx_Wx);
1673/** Opcode VEX.66.0F38 0x92 (vex only). */
1674FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
1675/** Opcode VEX.66.0F38 0x93 (vex only). */
1676FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
1677/* Opcode VEX.66.0F38 0x94 - invalid. */
1678/* Opcode VEX.66.0F38 0x95 - invalid. */
1679/** Opcode VEX.66.0F38 0x96 (vex only). */
1680FNIEMOP_STUB(iemOp_vfmaddsub132ps_d_Vx_Hx_Wx);
1681/** Opcode VEX.66.0F38 0x97 (vex only). */
1682FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
1683/** Opcode VEX.66.0F38 0x98 (vex only). */
1684FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
1685/** Opcode VEX.66.0F38 0x99 (vex only). */
1686FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
1687/** Opcode VEX.66.0F38 0x9a (vex only). */
1688FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
1689/** Opcode VEX.66.0F38 0x9b (vex only). */
1690FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
1691/** Opcode VEX.66.0F38 0x9c (vex only). */
1692FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
1693/** Opcode VEX.66.0F38 0x9d (vex only). */
1694FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
1695/** Opcode VEX.66.0F38 0x9e (vex only). */
1696FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
1697/** Opcode VEX.66.0F38 0x9f (vex only). */
1698FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);
1699
1700/* Opcode VEX.66.0F38 0xa0 - invalid. */
1701/* Opcode VEX.66.0F38 0xa1 - invalid. */
1702/* Opcode VEX.66.0F38 0xa2 - invalid. */
1703/* Opcode VEX.66.0F38 0xa3 - invalid. */
1704/* Opcode VEX.66.0F38 0xa4 - invalid. */
1705/* Opcode VEX.66.0F38 0xa5 - invalid. */
1706/** Opcode VEX.66.0F38 0xa6 (vex only). */
1707FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
1708/** Opcode VEX.66.0F38 0xa7 (vex only). */
1709FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
1710/** Opcode VEX.66.0F38 0xa8 (vex only). */
1711FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
1712/** Opcode VEX.66.0F38 0xa9 (vex only). */
1713FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
1714/** Opcode VEX.66.0F38 0xaa (vex only). */
1715FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
1716/** Opcode VEX.66.0F38 0xab (vex only). */
1717FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
1718/** Opcode VEX.66.0F38 0xac (vex only). */
1719FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
1720/** Opcode VEX.66.0F38 0xad (vex only). */
1721FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
1722/** Opcode VEX.66.0F38 0xae (vex only). */
1723FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
1724/** Opcode VEX.66.0F38 0xaf (vex only). */
1725FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);
1726
1727/* Opcode VEX.66.0F38 0xb0 - invalid. */
1728/* Opcode VEX.66.0F38 0xb1 - invalid. */
1729/* Opcode VEX.66.0F38 0xb2 - invalid. */
1730/* Opcode VEX.66.0F38 0xb3 - invalid. */
1731/* Opcode VEX.66.0F38 0xb4 - invalid. */
1732/* Opcode VEX.66.0F38 0xb5 - invalid. */
1733/** Opcode VEX.66.0F38 0xb6 (vex only). */
1734FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
1735/** Opcode VEX.66.0F38 0xb7 (vex only). */
1736FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
1737/** Opcode VEX.66.0F38 0xb8 (vex only). */
1738FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
1739/** Opcode VEX.66.0F38 0xb9 (vex only). */
1740FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
1741/** Opcode VEX.66.0F38 0xba (vex only). */
1742FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
1743/** Opcode VEX.66.0F38 0xbb (vex only). */
1744FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
1745/** Opcode VEX.66.0F38 0xbc (vex only). */
1746FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
1747/** Opcode VEX.66.0F38 0xbd (vex only). */
1748FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
1749/** Opcode VEX.66.0F38 0xbe (vex only). */
1750FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
1751/** Opcode VEX.66.0F38 0xbf (vex only). */
1752FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);
1753
1754/* Opcode VEX.0F38 0xc0 - invalid. */
1755/* Opcode VEX.66.0F38 0xc0 - invalid. */
1756/* Opcode VEX.0F38 0xc1 - invalid. */
1757/* Opcode VEX.66.0F38 0xc1 - invalid. */
1758/* Opcode VEX.0F38 0xc2 - invalid. */
1759/* Opcode VEX.66.0F38 0xc2 - invalid. */
1760/* Opcode VEX.0F38 0xc3 - invalid. */
1761/* Opcode VEX.66.0F38 0xc3 - invalid. */
1762/* Opcode VEX.0F38 0xc4 - invalid. */
1763/* Opcode VEX.66.0F38 0xc4 - invalid. */
1764/* Opcode VEX.0F38 0xc5 - invalid. */
1765/* Opcode VEX.66.0F38 0xc5 - invalid. */
1766/* Opcode VEX.0F38 0xc6 - invalid. */
1767/* Opcode VEX.66.0F38 0xc6 - invalid. */
1768/* Opcode VEX.0F38 0xc7 - invalid. */
1769/* Opcode VEX.66.0F38 0xc7 - invalid. */
1770/* Opcode VEX.0F38 0xc8 - invalid. */
1771/* Opcode VEX.66.0F38 0xc8 - invalid. */
1772/* Opcode VEX.0F38 0xc9 - invalid. */
1773/* Opcode VEX.66.0F38 0xc9 - invalid. */
1774/* Opcode VEX.0F38 0xca - invalid. */
1775/* Opcode VEX.66.0F38 0xca - invalid. */
1776/* Opcode VEX.0F38 0xcb - invalid. */
1777/* Opcode VEX.66.0F38 0xcb - invalid. */
1778/* Opcode VEX.0F38 0xcc - invalid. */
1779/* Opcode VEX.66.0F38 0xcc - invalid. */
1780/* Opcode VEX.0F38 0xcd - invalid. */
1781/* Opcode VEX.66.0F38 0xcd - invalid. */
1782/* Opcode VEX.0F38 0xce - invalid. */
1783/* Opcode VEX.66.0F38 0xce - invalid. */
1784/* Opcode VEX.0F38 0xcf - invalid. */
1785/* Opcode VEX.66.0F38 0xcf - invalid. */
1786
1787/* Opcode VEX.66.0F38 0xd0 - invalid. */
1788/* Opcode VEX.66.0F38 0xd1 - invalid. */
1789/* Opcode VEX.66.0F38 0xd2 - invalid. */
1790/* Opcode VEX.66.0F38 0xd3 - invalid. */
1791/* Opcode VEX.66.0F38 0xd4 - invalid. */
1792/* Opcode VEX.66.0F38 0xd5 - invalid. */
1793/* Opcode VEX.66.0F38 0xd6 - invalid. */
1794/* Opcode VEX.66.0F38 0xd7 - invalid. */
1795/* Opcode VEX.66.0F38 0xd8 - invalid. */
1796/* Opcode VEX.66.0F38 0xd9 - invalid. */
1797/* Opcode VEX.66.0F38 0xda - invalid. */
1798/** Opcode VEX.66.0F38 0xdb. */
1799FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
1800/** Opcode VEX.66.0F38 0xdc. */
1801FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
1802/** Opcode VEX.66.0F38 0xdd. */
1803FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
1804/** Opcode VEX.66.0F38 0xde. */
1805FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
1806/** Opcode VEX.66.0F38 0xdf. */
1807FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);
1808
1809/* Opcode VEX.66.0F38 0xe0 - invalid. */
1810/* Opcode VEX.66.0F38 0xe1 - invalid. */
1811/* Opcode VEX.66.0F38 0xe2 - invalid. */
1812/* Opcode VEX.66.0F38 0xe3 - invalid. */
1813/* Opcode VEX.66.0F38 0xe4 - invalid. */
1814/* Opcode VEX.66.0F38 0xe5 - invalid. */
1815/* Opcode VEX.66.0F38 0xe6 - invalid. */
1816/* Opcode VEX.66.0F38 0xe7 - invalid. */
1817/* Opcode VEX.66.0F38 0xe8 - invalid. */
1818/* Opcode VEX.66.0F38 0xe9 - invalid. */
1819/* Opcode VEX.66.0F38 0xea - invalid. */
1820/* Opcode VEX.66.0F38 0xeb - invalid. */
1821/* Opcode VEX.66.0F38 0xec - invalid. */
1822/* Opcode VEX.66.0F38 0xed - invalid. */
1823/* Opcode VEX.66.0F38 0xee - invalid. */
1824/* Opcode VEX.66.0F38 0xef - invalid. */
1825
1826
1827/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
1828/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
1829/* Opcode VEX.F3.0F38 0xf0 - invalid. */
1830/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */
1831
1832/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
1833/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
1834/* Opcode VEX.F3.0F38 0xf1 - invalid. */
1835/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */
1836
1837/**
1838 * @opcode 0xf2
1839 * @oppfx none
1840 * @opflmodify cf,pf,af,zf,sf,of
1841 * @opflclear cf,of
1842 * @opflundef pf,af
1843 * @note VEX only
1844 */
1845FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
1846{
1847 IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
1849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1850 if (IEM_IS_MODRM_REG_MODE(bRm))
1851 {
1852 /*
1853 * Register, register.
1854 */
1855 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1856 {
1857 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1858 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
1859 IEM_MC_ARG(uint64_t *, pDst, 0);
1860 IEM_MC_ARG(uint64_t, uSrc1, 1);
1861 IEM_MC_ARG(uint64_t, uSrc2, 2);
1862 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1863 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1864 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1865 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1866 IEM_MC_REF_EFLAGS(pEFlags);
1867 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
1868 pDst, uSrc1, uSrc2, pEFlags);
1869 IEM_MC_ADVANCE_RIP_AND_FINISH();
1870 IEM_MC_END();
1871 }
1872 else
1873 {
1874 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1875 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
1876 IEM_MC_ARG(uint32_t *, pDst, 0);
1877 IEM_MC_ARG(uint32_t, uSrc1, 1);
1878 IEM_MC_ARG(uint32_t, uSrc2, 2);
1879 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1880 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1881 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1882 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1883 IEM_MC_REF_EFLAGS(pEFlags);
1884 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
1885 pDst, uSrc1, uSrc2, pEFlags);
1886 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1887 IEM_MC_ADVANCE_RIP_AND_FINISH();
1888 IEM_MC_END();
1889 }
1890 }
1891 else
1892 {
1893 /*
1894 * Register, memory.
1895 */
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1897 {
1898 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1899 IEM_MC_ARG(uint64_t *, pDst, 0);
1900 IEM_MC_ARG(uint64_t, uSrc1, 1);
1901 IEM_MC_ARG(uint64_t, uSrc2, 2);
1902 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1905 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
1906 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1907 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1908 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1909 IEM_MC_REF_EFLAGS(pEFlags);
1910 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
1911 pDst, uSrc1, uSrc2, pEFlags);
1912 IEM_MC_ADVANCE_RIP_AND_FINISH();
1913 IEM_MC_END();
1914 }
1915 else
1916 {
1917 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1918 IEM_MC_ARG(uint32_t *, pDst, 0);
1919 IEM_MC_ARG(uint32_t, uSrc1, 1);
1920 IEM_MC_ARG(uint32_t, uSrc2, 2);
1921 IEM_MC_ARG(uint32_t *, pEFlags, 3);
1922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1924 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
1925 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1926 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1927 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1928 IEM_MC_REF_EFLAGS(pEFlags);
1929 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
1930 pDst, uSrc1, uSrc2, pEFlags);
1931 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1932 IEM_MC_ADVANCE_RIP_AND_FINISH();
1933 IEM_MC_END();
1934 }
1935 }
1936}
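/* For reference, a minimal sketch (not built) of the operation the andn
 * worker is expected to implement, following the flag notes in the doxygen
 * block above; the real workers are iemAImpl_andn_u64/u32(_fallback) and the
 * helper name below is purely illustrative. */
#if 0
static void iemAImplSketch_andn_u64(uint64_t *puDst, uint64_t uSrc1, uint64_t uSrc2, uint32_t *pfEFlags)
{
    uint64_t const uResult = ~uSrc1 & uSrc2;    /* dst = ~src1 & src2 */
    *puDst = uResult;
    uint32_t fEfl = *pfEFlags & ~(X86_EFL_CF | X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF);
    if (!uResult)
        fEfl |= X86_EFL_ZF;                     /* ZF reflects the result */
    if (uResult & RT_BIT_64(63))
        fEfl |= X86_EFL_SF;                     /* SF reflects the sign bit; CF and OF stay cleared */
    *pfEFlags = fEfl;                           /* PF and AF are architecturally undefined, left as-is here */
}
#endif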
1937
1938/* Opcode VEX.66.0F38 0xf2 - invalid. */
1939/* Opcode VEX.F3.0F38 0xf2 - invalid. */
1940/* Opcode VEX.F2.0F38 0xf2 - invalid. */
1941
1942
1943/* Opcode VEX.66.0F38 0xf3 - invalid. */
1944/* Opcode VEX.F3.0F38 0xf3 - invalid. */
1945
1946/* Opcode VEX.0F38 0xf3 /0 - invalid (vex only - group 17). */
1947
1948/** Body for the vex group 17 instructions. */
1949#define IEMOP_BODY_By_Ey(a_Instr) \
1950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
1951 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1952 { \
1953 /* \
1954 * Register, register. \
1955 */ \
1956 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1957 { \
1958 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1959 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1960 IEM_MC_ARG(uint64_t, uSrc, 2); \
1961 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1962 IEM_MC_ARG(uint64_t *, pDst, 1); \
1963 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1964 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1965 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
1966 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
1967 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
1968 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1969 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1970 IEM_MC_END(); \
1971 } \
1972 else \
1973 { \
1974 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
1975 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1976 IEM_MC_ARG(uint32_t, uSrc, 2); \
1977 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1978 IEM_MC_ARG(uint32_t *, pDst, 1); \
1979 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1980 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
1981 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
1982 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
1983 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
1984 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1985 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
1986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1987 IEM_MC_END(); \
1988 } \
1989 } \
1990 else \
1991 { \
1992 /* \
1993 * Register, memory. \
1994 */ \
1995 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1996 { \
1997 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2000 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2001 \
2002 IEM_MC_ARG(uint64_t, uSrc, 2); \
2003 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2004 IEM_MC_ARG(uint64_t *, pDst, 1); \
2005 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2006 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2007 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2008 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2009 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2010 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2011 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2012 IEM_MC_END(); \
2013 } \
2014 else \
2015 { \
2016 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2019 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2020 \
2021 IEM_MC_ARG(uint32_t, uSrc, 2); \
2022 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2023 IEM_MC_ARG(uint32_t *, pDst, 1); \
2024 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2025 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2026 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2027 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2028 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2029 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2030 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2031 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2032 IEM_MC_END(); \
2033 } \
2034 } \
2035 (void)0
2036
2037
2038/**
2039 * @opmaps vexgrp17
2040 * @opcode /1
2041 * @opflmodify cf,pf,af,zf,sf,of
2042 * @opflclear of
2043 * @opflundef pf,af
2044 */
2045FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
2046{
2047 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2048 IEMOP_BODY_By_Ey(blsr);
2049}
2050
2051
2052/**
2053 * @opmaps vexgrp17
2054 * @opcode /2
2055 * @opflmodify cf,pf,af,zf,sf,of
2056 * @opflclear zf,of
2057 * @opflundef pf,af
2058 */
2059FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
2060{
2061 IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2062 IEMOP_BODY_By_Ey(blsmsk);
2063}
2064
2065
2066/**
2067 * @opmaps vexgrp17
2068 * @opcode /3
2069 * @opflmodify cf,pf,af,zf,sf,of
2070 * @opflclear of
2071 * @opflundef pf,af
2072 */
2073FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
2074{
2075 IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2076 IEMOP_BODY_By_Ey(blsi);
2077}
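/* For reference, the value computations the three group 17 workers boil down
 * to; a sketch only (not built, names illustrative), with the EFLAGS updates
 * described in the doxygen blocks above omitted: */
#if 0
static uint64_t iemAImplSketch_blsr_u64(uint64_t uSrc)   { return uSrc & (uSrc - 1); } /* clear lowest set bit */
static uint64_t iemAImplSketch_blsmsk_u64(uint64_t uSrc) { return uSrc ^ (uSrc - 1); } /* mask up to & incl. lowest set bit */
static uint64_t iemAImplSketch_blsi_u64(uint64_t uSrc)   { return uSrc & (0 - uSrc); } /* isolate lowest set bit */
#endif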
2078
2079
2080/* Opcode VEX.0F38 0xf3 /4 - invalid. */
2081/* Opcode VEX.0F38 0xf3 /5 - invalid. */
2082/* Opcode VEX.0F38 0xf3 /6 - invalid. */
2083/* Opcode VEX.0F38 0xf3 /7 - invalid. */
2084
2085/**
2086 * Group 17 jump table for the no-prefix (VEX.0F38 0xf3) variant.
2087 */
2088IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
2089{
2090 /* /0 */ iemOp_InvalidWithRM,
2091 /* /1 */ iemOp_VGrp17_blsr_By_Ey,
2092 /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
2093 /* /3 */ iemOp_VGrp17_blsi_By_Ey,
2094 /* /4 */ iemOp_InvalidWithRM,
2095 /* /5 */ iemOp_InvalidWithRM,
2096 /* /6 */ iemOp_InvalidWithRM,
2097 /* /7 */ iemOp_InvalidWithRM
2098};
2099AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);
2100
2101/** Opcode VEX.0F38 0xf3 (vex only - group 17). */
2102FNIEMOP_DEF(iemOp_VGrp17_f3)
2103{
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
2106}
2107
2108/* Opcode VEX.F2.0F38 0xf3 - invalid. */
2109
2110
2111/* Opcode VEX.0F38 0xf4 - invalid. */
2112/* Opcode VEX.66.0F38 0xf4 - invalid. */
2113/* Opcode VEX.F3.0F38 0xf4 - invalid. */
2114/* Opcode VEX.F2.0F38 0xf4 - invalid. */
2115
2116/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
2117#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
2118 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
2119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2120 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2121 { \
2122 /* \
2123 * Register, register. \
2124 */ \
2125 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2126 { \
2127 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2128 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2129 IEM_MC_ARG(uint64_t *, pDst, 0); \
2130 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2131 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2132 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2133 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2134 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2135 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2136 IEM_MC_REF_EFLAGS(pEFlags); \
2137 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2138 iemAImpl_ ## a_Instr ## _u64_fallback), \
2139 pDst, uSrc1, uSrc2, pEFlags); \
2140 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2141 IEM_MC_END(); \
2142 } \
2143 else \
2144 { \
2145 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2146 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2147 IEM_MC_ARG(uint32_t *, pDst, 0); \
2148 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2149 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2150 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2151 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2152 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2153 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2154 IEM_MC_REF_EFLAGS(pEFlags); \
2155 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2156 iemAImpl_ ## a_Instr ## _u32_fallback), \
2157 pDst, uSrc1, uSrc2, pEFlags); \
2158 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2159 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2160 IEM_MC_END(); \
2161 } \
2162 } \
2163 else \
2164 { \
2165 /* \
2166 * Register, memory. \
2167 */ \
2168 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2169 { \
2170 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2171 IEM_MC_ARG(uint64_t *, pDst, 0); \
2172 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2173 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2174 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2177 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2178 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2179 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2180 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2181 IEM_MC_REF_EFLAGS(pEFlags); \
2182 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2183 iemAImpl_ ## a_Instr ## _u64_fallback), \
2184 pDst, uSrc1, uSrc2, pEFlags); \
2185 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2186 IEM_MC_END(); \
2187 } \
2188 else \
2189 { \
2190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2191 IEM_MC_ARG(uint32_t *, pDst, 0); \
2192 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2193 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2194 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2197 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2198 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2199 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2200 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2201 IEM_MC_REF_EFLAGS(pEFlags); \
2202 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2203 iemAImpl_ ## a_Instr ## _u32_fallback), \
2204 pDst, uSrc1, uSrc2, pEFlags); \
2205 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2206 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2207 IEM_MC_END(); \
2208 } \
2209 } \
2210 (void)0
2211
2212/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
2213#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember) \
2214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2215 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2216 { \
2217 /* \
2218 * Register, register. \
2219 */ \
2220 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2221 { \
2222 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2224 IEM_MC_ARG(uint64_t *, pDst, 0); \
2225 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2226 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2227 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2228 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2229 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2230 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2231 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2233 IEM_MC_END(); \
2234 } \
2235 else \
2236 { \
2237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2238 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2239 IEM_MC_ARG(uint32_t *, pDst, 0); \
2240 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2241 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2242 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2243 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2244 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2245 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2246 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2247 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2248 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2249 IEM_MC_END(); \
2250 } \
2251 } \
2252 else \
2253 { \
2254 /* \
2255 * Register, memory. \
2256 */ \
2257 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2258 { \
2259 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2260 IEM_MC_ARG(uint64_t *, pDst, 0); \
2261 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2262 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2265 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2266 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2267 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2268 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2269 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2270 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2271 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2272 IEM_MC_END(); \
2273 } \
2274 else \
2275 { \
2276 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2277 IEM_MC_ARG(uint32_t *, pDst, 0); \
2278 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2279 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2282 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2283 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2284 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2285 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2286 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2287 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2288 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2289 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2290 IEM_MC_END(); \
2291 } \
2292 } \
2293 (void)0
2294
2295/**
2296 * @opcode 0xf5
2297 * @oppfx none
2298 * @opflmodify cf,pf,af,zf,sf,of
2299 * @opflclear of
2300 * @opflundef pf,af
2301 * @note VEX only
2302 */
2303FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
2304{
2305 IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2306 IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
2307}
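/* For reference, a sketch (not built, name illustrative) of the value
 * computation bzhi performs; the EFLAGS updates noted in the doxygen block
 * above are omitted: */
#if 0
static uint64_t iemAImplSketch_bzhi_u64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint8_t const iFirstBitToClear = (uint8_t)uSrc2;    /* index = low 8 bits of the vvvv operand */
    if (iFirstBitToClear >= 64)
        return uSrc1;                                   /* index saturates at the operand size */
    return uSrc1 & (RT_BIT_64(iFirstBitToClear) - 1);   /* zero bits [63:index] */
}
#endif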
2308
2309/* Opcode VEX.66.0F38 0xf5 - invalid. */
2310
2311/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
2312#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
2313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2314 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2315 { \
2316 /* \
2317 * Register, register. \
2318 */ \
2319 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2320 { \
2321 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2322 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2323 IEM_MC_ARG(uint64_t *, pDst, 0); \
2324 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2325 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2326 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2327 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2328 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2329 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2330 iemAImpl_ ## a_Instr ## _u64, \
2331 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2332 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2333 IEM_MC_END(); \
2334 } \
2335 else \
2336 { \
2337 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2338 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2339 IEM_MC_ARG(uint32_t *, pDst, 0); \
2340 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2341 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2342 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2343 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2344 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2345 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2346 iemAImpl_ ## a_Instr ## _u32, \
2347 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2348 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2349 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2350 IEM_MC_END(); \
2351 } \
2352 } \
2353 else \
2354 { \
2355 /* \
2356 * Register, memory. \
2357 */ \
2358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2359 { \
2360 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2361 IEM_MC_ARG(uint64_t *, pDst, 0); \
2362 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2363 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2366 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2367 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2368 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2369 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2370 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2371 iemAImpl_ ## a_Instr ## _u64, \
2372 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2373 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2374 IEM_MC_END(); \
2375 } \
2376 else \
2377 { \
2378 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2379 IEM_MC_ARG(uint32_t *, pDst, 0); \
2380 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2381 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2384 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2385 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2386 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2387 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2388 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2389 iemAImpl_ ## a_Instr ## _u32, \
2390 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2391 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2392 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2393 IEM_MC_END(); \
2394 } \
2395 } \
2396 (void)0
2397
2398
2399/** Opcode VEX.F3.0F38 0xf5 (vex only). */
2400FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
2401{
2402 IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2403 IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
2404}
2405
2406
2407/** Opcode VEX.F2.0F38 0xf5 (vex only). */
2408FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
2409{
2410 IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2411 IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
2412}
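/* For reference, straightforward loops (not built, names illustrative) for
 * the parallel bit extract/deposit operations the workers implement: pext
 * packs the source bits selected by the mask into the low bits of the
 * result, while pdep scatters the low source bits to the mask positions. */
#if 0
static uint64_t iemAImplSketch_pext_u64(uint64_t uSrc, uint64_t fMask)
{
    uint64_t uResult  = 0;
    unsigned iDstBit  = 0;
    while (fMask)
    {
        uint64_t const fLowestMaskBit = fMask & (0 - fMask);
        if (uSrc & fLowestMaskBit)
            uResult |= RT_BIT_64(iDstBit);
        iDstBit++;
        fMask &= fMask - 1;             /* clear the mask bit just consumed */
    }
    return uResult;
}

static uint64_t iemAImplSketch_pdep_u64(uint64_t uSrc, uint64_t fMask)
{
    uint64_t uResult  = 0;
    unsigned iSrcBit  = 0;
    while (fMask)
    {
        uint64_t const fLowestMaskBit = fMask & (0 - fMask);
        if (uSrc & RT_BIT_64(iSrcBit))
            uResult |= fLowestMaskBit;
        iSrcBit++;
        fMask &= fMask - 1;
    }
    return uResult;
}
#endif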
2413
2414
2415/* Opcode VEX.0F38 0xf6 - invalid. */
2416/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
2417/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */
2418
2419
2420/**
2421 * @opcode 0xf6
2422 * @oppfx 0xf2
2423 * @opflclass unchanged
2424 */
2425FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
2426{
2427 IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2429 if (IEM_IS_MODRM_REG_MODE(bRm))
2430 {
2431 /*
2432 * Register, register.
2433 */
2434 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2435 {
2436 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2437 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2438 IEM_MC_ARG(uint64_t *, pDst1, 0);
2439 IEM_MC_ARG(uint64_t *, pDst2, 1);
2440 IEM_MC_ARG(uint64_t, uSrc1, 2);
2441 IEM_MC_ARG(uint64_t, uSrc2, 3);
2442 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
2443 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2444 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2445 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2446 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
2447 pDst1, pDst2, uSrc1, uSrc2);
2448 IEM_MC_ADVANCE_RIP_AND_FINISH();
2449 IEM_MC_END();
2450 }
2451 else
2452 {
2453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2454 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2455 IEM_MC_ARG(uint32_t *, pDst1, 0);
2456 IEM_MC_ARG(uint32_t *, pDst2, 1);
2457 IEM_MC_ARG(uint32_t, uSrc1, 2);
2458 IEM_MC_ARG(uint32_t, uSrc2, 3);
2459 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
2460 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2461 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2463 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
2464 pDst1, pDst2, uSrc1, uSrc2);
2465 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
2466 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2467 IEM_MC_ADVANCE_RIP_AND_FINISH();
2468 IEM_MC_END();
2469 }
2470 }
2471 else
2472 {
2473 /*
2474 * Register, memory.
2475 */
2476 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2477 {
2478 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2479 IEM_MC_ARG(uint64_t *, pDst1, 0);
2480 IEM_MC_ARG(uint64_t *, pDst2, 1);
2481 IEM_MC_ARG(uint64_t, uSrc1, 2);
2482 IEM_MC_ARG(uint64_t, uSrc2, 3);
2483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2485 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2486 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2487 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
2488 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2489 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2490 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
2491 pDst1, pDst2, uSrc1, uSrc2);
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2498 IEM_MC_ARG(uint32_t *, pDst1, 0);
2499 IEM_MC_ARG(uint32_t *, pDst2, 1);
2500 IEM_MC_ARG(uint32_t, uSrc1, 2);
2501 IEM_MC_ARG(uint32_t, uSrc2, 3);
2502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2504 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2505 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
2507 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2508 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2509 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
2510 pDst1, pDst2, uSrc1, uSrc2);
2511 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
2512 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2513 IEM_MC_ADVANCE_RIP_AND_FINISH();
2514 IEM_MC_END();
2515 }
2516 }
2517}
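/* For reference, a sketch (not built, name illustrative) of the mulx
 * operation, using the 32-bit form for brevity: an unsigned widening
 * multiply of rDX by the r/m operand, high half to the ModRM.reg
 * destination, low half to the VEX.vvvv destination, EFLAGS untouched. */
#if 0
static void iemAImplSketch_mulx_u32(uint32_t *puDstHi, uint32_t *puDstLo, uint32_t uSrc1, uint32_t uSrc2)
{
    uint64_t const uProduct = (uint64_t)uSrc1 * uSrc2;  /* eDX * r/m, unsigned */
    *puDstHi = (uint32_t)(uProduct >> 32);              /* pDst1 (ModRM.reg) gets the high half */
    *puDstLo = (uint32_t)uProduct;                      /* pDst2 (VEX.vvvv) gets the low half */
}
#endif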
2518
2519
2520/**
2521 * @opcode 0xf7
2522 * @oppfx none
2523 * @opflmodify cf,pf,af,zf,sf,of
2524 * @opflclear cf,of
2525 * @opflundef pf,af,sf
2526 */
2527FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
2528{
2529 IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2530 IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
2531}
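/* For reference, a sketch (not built, name illustrative) of the bextr value
 * computation; EFLAGS handling per the doxygen block above is omitted: */
#if 0
static uint64_t iemAImplSketch_bextr_u64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint8_t const iStart = (uint8_t)uSrc2;          /* start index:  bits [7:0]  of the vvvv operand */
    uint8_t const cBits  = (uint8_t)(uSrc2 >> 8);   /* field length: bits [15:8] of the vvvv operand */
    if (iStart >= 64 || cBits == 0)
        return 0;                                   /* out-of-range start or empty field reads as zero */
    uint64_t const uShifted = uSrc1 >> iStart;
    return cBits >= 64 ? uShifted : uShifted & (RT_BIT_64(cBits) - 1);
}
#endif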
2532
2533
2534/**
2535 * @opcode 0xf7
2536 * @oppfx 0x66
2537 * @opflclass unchanged
2538 */
2539FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
2540{
2541 IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2542 IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2);
2543}
2544
2545
2546/**
2547 * @opcode 0xf7
2548 * @oppfx 0xf3
2549 * @opflclass unchanged
2550 */
2551FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
2552{
2553 IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2554 IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2);
2555}
2556
2557
2558/**
2559 * @opcode 0xf7
2560 * @oppfx 0xf2
2561 * @opflclass unchanged
2562 */
2563FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
2564{
2565 IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2566 IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2);
2567}
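/* For reference, the value computations of the three flagless BMI2 shifts; a
 * sketch only (not built, names illustrative). The shift count comes from the
 * vvvv operand and is masked to the operand width like the legacy shifts
 * (63 here, 31 for the 32-bit forms): */
#if 0
static uint64_t iemAImplSketch_shlx_u64(uint64_t uSrc, uint64_t cShift) { return uSrc << (cShift & 63); }
static uint64_t iemAImplSketch_sarx_u64(uint64_t uSrc, uint64_t cShift) { return (uint64_t)((int64_t)uSrc >> (cShift & 63)); } /* arithmetic */
static uint64_t iemAImplSketch_shrx_u64(uint64_t uSrc, uint64_t cShift) { return uSrc >> (cShift & 63); } /* logical */
#endif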
2568
2569/* Opcode VEX.0F38 0xf8 - invalid. */
2570/* Opcode VEX.66.0F38 0xf8 - invalid. */
2571/* Opcode VEX.F3.0F38 0xf8 - invalid. */
2572/* Opcode VEX.F2.0F38 0xf8 - invalid. */
2573
2574/* Opcode VEX.0F38 0xf9 - invalid. */
2575/* Opcode VEX.66.0F38 0xf9 - invalid. */
2576/* Opcode VEX.F3.0F38 0xf9 - invalid. */
2577/* Opcode VEX.F2.0F38 0xf9 - invalid. */
2578
2579/* Opcode VEX.0F38 0xfa - invalid. */
2580/* Opcode VEX.66.0F38 0xfa - invalid. */
2581/* Opcode VEX.F3.0F38 0xfa - invalid. */
2582/* Opcode VEX.F2.0F38 0xfa - invalid. */
2583
2584/* Opcode VEX.0F38 0xfb - invalid. */
2585/* Opcode VEX.66.0F38 0xfb - invalid. */
2586/* Opcode VEX.F3.0F38 0xfb - invalid. */
2587/* Opcode VEX.F2.0F38 0xfb - invalid. */
2588
2589/* Opcode VEX.0F38 0xfc - invalid. */
2590/* Opcode VEX.66.0F38 0xfc - invalid. */
2591/* Opcode VEX.F3.0F38 0xfc - invalid. */
2592/* Opcode VEX.F2.0F38 0xfc - invalid. */
2593
2594/* Opcode VEX.0F38 0xfd - invalid. */
2595/* Opcode VEX.66.0F38 0xfd - invalid. */
2596/* Opcode VEX.F3.0F38 0xfd - invalid. */
2597/* Opcode VEX.F2.0F38 0xfd - invalid. */
2598
2599/* Opcode VEX.0F38 0xfe - invalid. */
2600/* Opcode VEX.66.0F38 0xfe - invalid. */
2601/* Opcode VEX.F3.0F38 0xfe - invalid. */
2602/* Opcode VEX.F2.0F38 0xfe - invalid. */
2603
2604/* Opcode VEX.0F38 0xff - invalid. */
2605/* Opcode VEX.66.0F38 0xff - invalid. */
2606/* Opcode VEX.F3.0F38 0xff - invalid. */
2607/* Opcode VEX.F2.0F38 0xff - invalid. */
2608
2609
2610/**
2611 * VEX opcode map \#2.
2612 *
2613 * @sa g_apfnThreeByte0f38
2614 */
2615const PFNIEMOP g_apfnVexMap2[] =
2616{
2617 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
2618 /* 0x00 */ iemOp_InvalidNeedRM, iemOp_vpshufb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2619 /* 0x01 */ iemOp_InvalidNeedRM, iemOp_vphaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2620 /* 0x02 */ iemOp_InvalidNeedRM, iemOp_vphaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2621 /* 0x03 */ iemOp_InvalidNeedRM, iemOp_vphaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2622 /* 0x04 */ iemOp_InvalidNeedRM, iemOp_vpmaddubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2623 /* 0x05 */ iemOp_InvalidNeedRM, iemOp_vphsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2624 /* 0x06 */ iemOp_InvalidNeedRM, iemOp_vphsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2625 /* 0x07 */ iemOp_InvalidNeedRM, iemOp_vphsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2626 /* 0x08 */ iemOp_InvalidNeedRM, iemOp_vpsignb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2627 /* 0x09 */ iemOp_InvalidNeedRM, iemOp_vpsignw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2628 /* 0x0a */ iemOp_InvalidNeedRM, iemOp_vpsignd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2629 /* 0x0b */ iemOp_InvalidNeedRM, iemOp_vpmulhrsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2630 /* 0x0c */ iemOp_InvalidNeedRM, iemOp_vpermilps_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2631 /* 0x0d */ iemOp_InvalidNeedRM, iemOp_vpermilpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2632 /* 0x0e */ iemOp_InvalidNeedRM, iemOp_vtestps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2633 /* 0x0f */ iemOp_InvalidNeedRM, iemOp_vtestpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2634
2635 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRM),
2636 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
2637 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
2638 /* 0x13 */ iemOp_InvalidNeedRM, iemOp_vcvtph2ps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2639 /* 0x14 */ IEMOP_X4(iemOp_InvalidNeedRM),
2640 /* 0x15 */ IEMOP_X4(iemOp_InvalidNeedRM),
2641 /* 0x16 */ iemOp_InvalidNeedRM, iemOp_vpermps_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2642 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_vptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2643 /* 0x18 */ iemOp_InvalidNeedRM, iemOp_vbroadcastss_Vx_Wd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2644 /* 0x19 */ iemOp_InvalidNeedRM, iemOp_vbroadcastsd_Vqq_Wq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2645 /* 0x1a */ iemOp_InvalidNeedRM, iemOp_vbroadcastf128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2646 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
2647 /* 0x1c */ iemOp_InvalidNeedRM, iemOp_vpabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2648 /* 0x1d */ iemOp_InvalidNeedRM, iemOp_vpabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2649 /* 0x1e */ iemOp_InvalidNeedRM, iemOp_vpabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2650 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
2651
2652 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2653 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2654 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2655 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2656 /* 0x24 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2657 /* 0x25 */ iemOp_InvalidNeedRM, iemOp_vpmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2658 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
2659 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
2660 /* 0x28 */ iemOp_InvalidNeedRM, iemOp_vpmuldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2661 /* 0x29 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2662 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_vmovntdqa_Vx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2663 /* 0x2b */ iemOp_InvalidNeedRM, iemOp_vpackusdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2664 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2665 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2666 /* 0x2e */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2667 /* 0x2f */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2668
2669 /* 0x30 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2670 /* 0x31 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2671 /* 0x32 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2672 /* 0x33 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2673 /* 0x34 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2674 /* 0x35 */ iemOp_InvalidNeedRM, iemOp_vpmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2675 /* 0x36 */ iemOp_InvalidNeedRM, iemOp_vpermd_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2676 /* 0x37 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2677 /* 0x38 */ iemOp_InvalidNeedRM, iemOp_vpminsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2678 /* 0x39 */ iemOp_InvalidNeedRM, iemOp_vpminsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2679 /* 0x3a */ iemOp_InvalidNeedRM, iemOp_vpminuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2680 /* 0x3b */ iemOp_InvalidNeedRM, iemOp_vpminud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2681 /* 0x3c */ iemOp_InvalidNeedRM, iemOp_vpmaxsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2682 /* 0x3d */ iemOp_InvalidNeedRM, iemOp_vpmaxsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2683 /* 0x3e */ iemOp_InvalidNeedRM, iemOp_vpmaxuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2684 /* 0x3f */ iemOp_InvalidNeedRM, iemOp_vpmaxud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2685
2686 /* 0x40 */ iemOp_InvalidNeedRM, iemOp_vpmulld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2687 /* 0x41 */ iemOp_InvalidNeedRM, iemOp_vphminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2688 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
2689 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
2690 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
2691 /* 0x45 */ iemOp_InvalidNeedRM, iemOp_vpsrlvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2692 /* 0x46 */ iemOp_InvalidNeedRM, iemOp_vpsravd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2693 /* 0x47 */ iemOp_InvalidNeedRM, iemOp_vpsllvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2694 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
2695 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
2696 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
2697 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
2698 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
2699 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
2700 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
2701 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
2702
2703 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
2704 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
2705 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
2706 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
2707 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
2708 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
2709 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
2710 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
2711 /* 0x58 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2712 /* 0x59 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2713 /* 0x5a */ iemOp_InvalidNeedRM, iemOp_vbroadcasti128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2714 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
2715 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
2716 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
2717 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
2718 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),
2719
2720 /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
2721 /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
2722 /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
2723 /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
2724 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
2725 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
2726 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
2727 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
2728 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
2729 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
2730 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
2731 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
2732 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
2733 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
2734 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
2735 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),
2736
2737 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
2738 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
2739 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
2740 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
2741 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
2742 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
2743 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
2744 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
2745 /* 0x78 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2746 /* 0x79 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2747 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
2748 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
2749 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
2750 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
2751 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
2752 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),
2753
2754 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
2755 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
2756 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
2757 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
2758 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
2759 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
2760 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
2761 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
2762 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
2763 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
2764 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
2765 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
2766 /* 0x8c */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2767 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
2768 /* 0x8e */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Mx_Vx_Hx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2769 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
2770
2771 /* 0x90 */ iemOp_InvalidNeedRM, iemOp_vpgatherdd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2772 /* 0x91 */ iemOp_InvalidNeedRM, iemOp_vpgatherqd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2773 /* 0x92 */ iemOp_InvalidNeedRM, iemOp_vgatherdps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2774 /* 0x93 */ iemOp_InvalidNeedRM, iemOp_vgatherqps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2775 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
2776 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
2777 /* 0x96 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2778 /* 0x97 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2779 /* 0x98 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2780 /* 0x99 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2781 /* 0x9a */ iemOp_InvalidNeedRM, iemOp_vfmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2782 /* 0x9b */ iemOp_InvalidNeedRM, iemOp_vfmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2783 /* 0x9c */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2784 /* 0x9d */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2785 /* 0x9e */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2786 /* 0x9f */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2787
2788 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2789 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2790 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2791 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2792 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2793 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2794 /* 0xa6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2795 /* 0xa7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2796 /* 0xa8 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2797 /* 0xa9 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2798 /* 0xaa */ iemOp_InvalidNeedRM, iemOp_vfmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2799 /* 0xab */ iemOp_InvalidNeedRM, iemOp_vfmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2800 /* 0xac */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2801 /* 0xad */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2802 /* 0xae */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2803 /* 0xaf */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2804
2805 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2806 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2807 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2808 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2809 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2810 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2811 /* 0xb6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2812 /* 0xb7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2813 /* 0xb8 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2814 /* 0xb9 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2815 /* 0xba */ iemOp_InvalidNeedRM, iemOp_vfmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2816 /* 0xbb */ iemOp_InvalidNeedRM, iemOp_vfmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2817 /* 0xbc */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2818 /* 0xbd */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2819 /* 0xbe */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2820 /* 0xbf */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2821
2822 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2823 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2824 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2825 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2826 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2827 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2828 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
2829 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
2830 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2831 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2832 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
2833 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
2834 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
2835 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
2836 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
2837 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
2838
2839 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2840 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2841 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2842 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2843 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2844 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2845 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
2846 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
2847 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2848 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2849 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
2850 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vaesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2851 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vaesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2852 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vaesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2853 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vaesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2854 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vaesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2855
2856 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2857 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2858 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
2859 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
2860 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2861 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
2862 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
2863 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
2864 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2865 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2866 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
2867 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
2868 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
2869 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
2870 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
2871 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),
2872
2873 /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRM),
2874 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRM),
2875 /* 0xf2 */ iemOp_andn_Gy_By_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2876 /* 0xf3 */ iemOp_VGrp17_f3, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2877 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
2878 /* 0xf5 */ iemOp_bzhi_Gy_Ey_By, iemOp_InvalidNeedRM, iemOp_pext_Gy_By_Ey, iemOp_pdep_Gy_By_Ey,
2879 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_mulx_By_Gy_rDX_Ey,
2880 /* 0xf7 */ iemOp_bextr_Gy_Ey_By, iemOp_shlx_Gy_Ey_By, iemOp_sarx_Gy_Ey_By, iemOp_shrx_Gy_Ey_By,
2881 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
2882 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
2883 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
2884 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
2885 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
2886 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
2887 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
2888 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
2889};
2890AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
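/* A sketch (not built) of how a four-entries-per-opcode map like this is
 * meant to be indexed: the VEX.pp field selects the column (0 = no prefix,
 * 1 = 066h, 2 = 0f3h, 3 = 0f2h). The exact decoder plumbing lives elsewhere;
 * the helper below is purely illustrative. */
#if 0
static PFNIEMOP iemSketchVexMap2Lookup(uint8_t bOpcode, uint8_t bVexPp)
{
    return g_apfnVexMap2[(size_t)bOpcode * 4 + (bVexPp & 3)];
}
#endif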
2891
2892/** @} */
2893