VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@ 96860

Last change on this file since 96860 was 96454, checked in by vboxsync, 21 months ago

VMM/IEM: Implement [v]pblendvb/[v]blendvps/[v]blendvpd instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 79.7 KB
Line 
1/* $Id: IEMAllInstructionsThree0f38.cpp.h 96454 2022-08-24 12:53:47Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Three byte opcodes with first two bytes 0x0f 0x38
33 * @{
34 */
35
/* Prototype of the shared MMX worker (64-bit worker fn + CPUID gate flag). */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported); /* in IEMAllInstructionsTwoByteOf.cpp.h */
37
38
/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @param   pfnU128     The 128-bit worker to invoke (gets the FXSAVE state,
 *                      see IEM_MC_CALL_SSE_AIMPL_2).
 * @sa      iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Consume the rest of the opcode bytes before the lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        /* Alignment-checking fetch (exceptions type 4, see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
93
94
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * @param   pfnU128     The 128-bit worker to invoke (gets the FXSAVE state,
 *                      see IEM_MC_CALL_SSE_AIMPL_2).
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Consume the rest of the opcode bytes before the lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checking fetch (exceptions type 4, see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
150
151
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2
 * instead of IEM_MC_CALL_SSE_AIMPL_2 below).
 *
 * @param   pfnU128     The 128-bit worker to invoke.
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Consume the rest of the opcode bytes before the lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checking fetch (exceptions type 4, see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
210
211
/**
 * Common worker for SSE4.2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.2 cpuid checks.
 *
 * @param   pfnU128     The 128-bit worker to invoke (gets the FXSAVE state,
 *                      see IEM_MC_CALL_SSE_AIMPL_2).
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Consume the rest of the opcode bytes before the lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        /* Alignment-checking fetch (exceptions type 4, see function docs). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
267
268
/**
 * Opcode 0x0f 0x38 0x00.
 * PSHUFB Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Assembly worker if the host has SSSE3, C fallback otherwise. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64, &iemAImpl_pshufb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
277
278
/**
 * Opcode 0x66 0x0f 0x38 0x00.
 * PSHUFB Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));
}
287
288
/**
 * Opcode 0x0f 0x38 0x01.
 * PHADDW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64, &iemAImpl_phaddw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
297
298
/**
 * Opcode 0x66 0x0f 0x38 0x01.
 * PHADDW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));
}
307
308
/**
 * Opcode 0x0f 0x38 0x02.
 * PHADDD Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64, &iemAImpl_phaddd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
317
318
/**
 * Opcode 0x66 0x0f 0x38 0x02.
 * PHADDD Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));
}
327
328
/**
 * Opcode 0x0f 0x38 0x03.
 * PHADDSW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64, &iemAImpl_phaddsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
337
338
/**
 * Opcode 0x66 0x0f 0x38 0x03.
 * PHADDSW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));
}
347
348
/**
 * Opcode 0x0f 0x38 0x04.
 * PMADDUBSW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
357
358
/**
 * Opcode 0x66 0x0f 0x38 0x04.
 * PMADDUBSW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));
}
367
368
/**
 * Opcode 0x0f 0x38 0x05.
 * PHSUBW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64, &iemAImpl_phsubw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
377
378
/**
 * Opcode 0x66 0x0f 0x38 0x05.
 * PHSUBW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));
}
387
388
/**
 * Opcode 0x0f 0x38 0x06.
 * PHSUBD Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64, &iemAImpl_phsubd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
397
398
399
/**
 * Opcode 0x66 0x0f 0x38 0x06.
 * PHSUBD Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));
}
408
409
/**
 * Opcode 0x0f 0x38 0x07.
 * PHSUBSW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64, &iemAImpl_phsubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
418
419
/**
 * Opcode 0x66 0x0f 0x38 0x07.
 * PHSUBSW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));
}
428
429
/**
 * Opcode 0x0f 0x38 0x08.
 * PSIGNB Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
438
439
/**
 * Opcode 0x66 0x0f 0x38 0x08.
 * PSIGNB Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));
}
448
449
/**
 * Opcode 0x0f 0x38 0x09.
 * PSIGNW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
458
459
/**
 * Opcode 0x66 0x0f 0x38 0x09.
 * PSIGNW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));
}
468
469
/**
 * Opcode 0x0f 0x38 0x0a.
 * PSIGND Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
478
479
/**
 * Opcode 0x66 0x0f 0x38 0x0a.
 * PSIGND Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));
}
488
489
/**
 * Opcode 0x0f 0x38 0x0b.
 * PMULHRSW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
498
499
/**
 * Opcode 0x66 0x0f 0x38 0x0b.
 * PMULHRSW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));
}
508
509
510/* Opcode 0x0f 0x38 0x0c - invalid. */
511/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
512/* Opcode 0x0f 0x38 0x0d - invalid. */
513/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
514/* Opcode 0x0f 0x38 0x0e - invalid. */
515/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
516/* Opcode 0x0f 0x38 0x0f - invalid. */
517/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
518
519
520/* Opcode 0x0f 0x38 0x10 - invalid */
521
522
/**
 * Body for the variable *blend* instructions (pblendvb, blendvps, blendvpd).
 *
 * These legacy-encoded forms take an implicit third operand: the mask in
 * XMM0 (hence the IEM_MC_REF_XREG_U128_CONST(puMask, 0) below).
 * SSE4.1 cpuid checks; the 128-bit memory operand is fetched with an
 * alignment-checking access.
 *
 * @param   a_Instr     Lowercase instruction name, used to form the
 *                      iemAImpl_<instr>_u128[_fallback] worker names.
 */
#define IEMOP_BODY_P_BLEND_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); /* implicit mask operand: XMM0 */ \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); /* implicit mask operand: XMM0 */ \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
    } \
    return VINF_SUCCESS
574
/**
 * Opcode 0x66 0x0f 0x38 0x10 (legacy only).
 * PBLENDVB Vdq, Wdq - implicit mask operand in XMM0 (see IEMOP_BODY_P_BLEND_X).
 */
FNIEMOP_DEF(iemOp_pblendvb_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PBLENDVB, pblendvb, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(pblendvb);
}
581
582
583/* Opcode 0x0f 0x38 0x11 - invalid */
584/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
585/* Opcode 0x0f 0x38 0x12 - invalid */
586/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
587/* Opcode 0x0f 0x38 0x13 - invalid */
588/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
589/* Opcode 0x0f 0x38 0x14 - invalid */
590
591
/**
 * Opcode 0x66 0x0f 0x38 0x14 (legacy only).
 * BLENDVPS Vdq, Wdq - implicit mask operand in XMM0 (see IEMOP_BODY_P_BLEND_X).
 */
FNIEMOP_DEF(iemOp_blendvps_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPS, blendvps, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvps);
}
598
599
600/* Opcode 0x0f 0x38 0x15 - invalid */
601
602
/**
 * Opcode 0x66 0x0f 0x38 0x15 (legacy only).
 * BLENDVPD Vdq, Wdq - implicit mask operand in XMM0 (see IEMOP_BODY_P_BLEND_X).
 */
FNIEMOP_DEF(iemOp_blendvpd_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPD, blendvpd, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvpd);
}
609
610
611/* Opcode 0x0f 0x38 0x16 - invalid */
612/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
613/* Opcode 0x0f 0x38 0x17 - invalid */
614
615
/**
 * Opcode 0x66 0x0f 0x38 0x17.
 * PTEST Vx, Wx - sets EFLAGS, modifies no XMM register (both sources are
 * referenced const; only pEFlags is written through).
 *
 * (Old comment said "- invalid"; this slot does decode PTEST.)
 */
FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Consume the rest of the opcode bytes before the lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checking 128-bit fetch. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
667
668
669/* Opcode 0x0f 0x38 0x18 - invalid */
670/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
671/* Opcode 0x0f 0x38 0x19 - invalid */
672/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
673/* Opcode 0x0f 0x38 0x1a - invalid */
674/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
675/* Opcode 0x0f 0x38 0x1b - invalid */
676/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
677
678
/**
 * Opcode 0x0f 0x38 0x1c.
 * PABSB Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
687
688
/**
 * Opcode 0x66 0x0f 0x38 0x1c.
 * PABSB Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));
}
697
698
/**
 * Opcode 0x0f 0x38 0x1d.
 * PABSW Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
707
708
/**
 * Opcode 0x66 0x0f 0x38 0x1d.
 * PABSW Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));
}
717
718
/**
 * Opcode 0x0f 0x38 0x1e.
 * PABSD Pq, Qq - MMX (64-bit) form; gated on the guest SSSE3 feature flag.
 */
FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
727
728
/**
 * Opcode 0x66 0x0f 0x38 0x1e.
 * PABSD Vx, Wx - SSE (128-bit) form; requires SSSE3.
 */
FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));
}
737
738
739/* Opcode 0x0f 0x38 0x1f - invalid */
740/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
741
742
/**
 * Body for the pmov{s,z}x* instructions.
 *
 * The register form always reads the low 64 bits of the source XMM register
 * regardless of @a a_SrcWidth; the memory form fetches exactly a_SrcWidth
 * bits. SSE4.1 cpuid checks.
 *
 * NOTE(review): the C fallback selected below is the v-prefixed symbol
 * (iemAImpl_v<instr>_u128_fallback) even for these non-VEX forms; this looks
 * like a deliberately shared worker - confirm against the implementation file.
 *
 * @param   a_Instr     Lowercase instruction name (e.g. pmovsxbw).
 * @param   a_SrcWidth  Memory source operand width in bits (16, 32 or 64).
 */
#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint64_t, uSrc, 1); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(2, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
    } \
    return VINF_SUCCESS
789
790
/**
 * Opcode 0x66 0x0f 0x38 0x20.
 * PMOVSXBW - memory source is 64 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
}
798
799
/**
 * Opcode 0x66 0x0f 0x38 0x21.
 * PMOVSXBD - memory source is 32 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
}
807
808
/**
 * Opcode 0x66 0x0f 0x38 0x22.
 * PMOVSXBQ - memory source is 16 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
}
816
817
/**
 * Opcode 0x66 0x0f 0x38 0x23.
 * PMOVSXWD - memory source is 64 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
}
825
826
/**
 * Opcode 0x66 0x0f 0x38 0x24.
 * PMOVSXWQ - memory source is 32 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
}
834
835
/**
 * Opcode 0x66 0x0f 0x38 0x25.
 * PMOVSXDQ - memory source is 64 bits wide.
 */
FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
}
843
844
845/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
846/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
847
848
/**
 * Opcode 0x66 0x0f 0x38 0x28.
 * PMULDQ Vx, Wx - uses the 'Opt' SSE4.1 worker (no FXSAVE state passed).
 */
FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
}
856
857
/**
 * Opcode 0x66 0x0f 0x38 0x29.
 * PCMPEQQ Vx, Wx - uses the regular SSE4.1 worker (FXSAVE state passed).
 */
FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
}
865
866
/**
 * @opcode      0x2a
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse4.1
 * @opgroup     og_sse41_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
{
    IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory.  IEM implements the non-temporal hint as a plain
           aligned 128-bit load into the destination register. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register (11 mr/reg) form is undefined and raises #UD. */
    /**
     * @opdone
     * @opmnemonic  ud660f382areg
     * @opcode      0x2a
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
913
914
/**
 * Opcode 0x66 0x0f 0x38 0x2b.
 * PACKUSDW Vx, Wx.
 *
 * NOTE(review): unlike the sibling decoders this passes the native worker
 * directly without IEM_SELECT_HOST_OR_FALLBACK - presumably the u128 worker
 * is always available here; confirm against the implementation file.
 */
FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
}
921
922
923/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
924/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
925/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
926/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
927
/** Opcode 0x66 0x0f 0x38 0x30 - PMOVZXBW: zero-extend 8 packed bytes to 8 words (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* The second macro parameter is presumably the source width in bits
       (64 = low qword / 8 bytes) -- see IEMOP_BODY_PMOV_S_Z for the decode. */
    IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64);
}
935
936
/** Opcode 0x66 0x0f 0x38 0x31 - PMOVZXBD: zero-extend 4 packed bytes to 4 dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Source width 32 bits (4 bytes) -- see IEMOP_BODY_PMOV_S_Z. */
    IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
}
944
945
/** Opcode 0x66 0x0f 0x38 0x32 - PMOVZXBQ: zero-extend 2 packed bytes to 2 qwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Source width 16 bits (2 bytes) -- see IEMOP_BODY_PMOV_S_Z. */
    IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
}
953
954
/** Opcode 0x66 0x0f 0x38 0x33 - PMOVZXWD: zero-extend 4 packed words to 4 dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Source width 64 bits (4 words) -- see IEMOP_BODY_PMOV_S_Z. */
    IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
}
962
963
/** Opcode 0x66 0x0f 0x38 0x34 - PMOVZXWQ: zero-extend 2 packed words to 2 qwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Source width 32 bits (2 words) -- see IEMOP_BODY_PMOV_S_Z. */
    IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
}
971
972
/** Opcode 0x66 0x0f 0x38 0x35 - PMOVZXDQ: zero-extend 2 packed dwords to 2 qwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Source width 64 bits (2 dwords) -- see IEMOP_BODY_PMOV_S_Z. */
    IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
}
980
981
982/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
983
984
985/** Opcode 0x66 0x0f 0x38 0x37. */
986FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
987{
988 IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
989 return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
990 IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
991}
992
993
/** Opcode 0x66 0x0f 0x38 0x38 - PMINSB: packed minimum of signed bytes (SSE4.1). */
FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
}
1001
1002
/** Opcode 0x66 0x0f 0x38 0x39 - PMINSD: packed minimum of signed dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
}
1010
1011
/** Opcode 0x66 0x0f 0x38 0x3a - PMINUW: packed minimum of unsigned words (SSE4.1). */
FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
}
1019
1020
/** Opcode 0x66 0x0f 0x38 0x3b - PMINUD: packed minimum of unsigned dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
}
1028
1029
/** Opcode 0x66 0x0f 0x38 0x3c - PMAXSB: packed maximum of signed bytes (SSE4.1). */
FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
}
1037
1038
/** Opcode 0x66 0x0f 0x38 0x3d - PMAXSD: packed maximum of signed dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
}
1046
1047
/** Opcode 0x66 0x0f 0x38 0x3e - PMAXUW: packed maximum of unsigned words (SSE4.1). */
FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
}
1055
1056
/** Opcode 0x66 0x0f 0x38 0x3f - PMAXUD: packed maximum of unsigned dwords (SSE4.1). */
FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
}
1064
1065
/** Opcode 0x66 0x0f 0x38 0x40 - PMULLD: packed dword multiply, low 32 bits of result (SSE4.1). */
FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Host implementation when the host has SSE4.1, C fallback otherwise. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
}
1073
1074
/** Opcode 0x66 0x0f 0x38 0x41 - PHMINPOSUW: horizontal minimum of unsigned words and its index (SSE4.1). */
FNIEMOP_DEF(iemOp_phminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PHMINPOSUW, phminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the 'Opt' worker variant (unlike the pmin/pmax family above);
       host implementation vs. C fallback selected on host SSE4.1 support. */
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback));
}
1082
1083
1084/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
1085/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
1086/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
1087/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
1088/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
1089/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
1090/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
1091/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
1092/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
1093/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
1094/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
1095/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
1096/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
1097/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
1098
1099/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
1100/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
1101/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
1102/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
1103/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
1104/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
1105/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
1106/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
1107/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
1108/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
1109/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
1110/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
1111/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
1112/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
1113/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
1114/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
1115
1116/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
1117/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
1118/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
1119/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
1120/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
1121/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
1122/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
1123/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
1124/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
1125/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
1126/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
1127/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
1128/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
1129/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
1130/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
1131/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
1132
1133/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
1134/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
1135/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
1136/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
1137/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
1138/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
1139/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
1140/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
1141/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
1142/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
1143/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
1144/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
1145/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
1146/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
1147/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
1148/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
1149
/** Opcode 0x66 0x0f 0x38 0x80 - INVEPT: invalidate EPT-derived translations.
 * Only decoded when nested VMX with EPT support is compiled in; a stub otherwise. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
{
    IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* VMX decode-time checks with their diagnostics codes. */
    IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
    IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory: reg operand = invalidation type, mem operand = INVEPT descriptor. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit operand size: the invalidation type is read as 64 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint64_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: the invalidation type is read as 32 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint32_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): IEM_MC_CALL_CIMPL_3 presumably returns out of this
       function, so this point is reached only for the (invalid) register
       encoding -- confirm against the IEM_MC macro definitions. */
    Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
#endif
1193
/** Opcode 0x66 0x0f 0x38 0x81 - INVVPID: invalidate VPID-tagged translations.
 * Only decoded when nested VMX support is compiled in; a stub otherwise.
 * (Note the broader #ifdef than INVEPT, which additionally requires EPT.) */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* VMX decode-time checks with their diagnostics codes. */
    IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
    IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory: reg operand = invalidation type, mem operand = INVVPID descriptor. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit operand size: the invalidation type is read as 64 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: the invalidation type is read as 32 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): only reached for the (invalid) register encoding,
       assuming IEM_MC_CALL_CIMPL_3 returns -- confirm against IEM_MC macros. */
    Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
#endif
1237
/** Opcode 0x66 0x0f 0x38 0x82 - INVPCID: invalidate translations for a PCID.
 * No decode-time CPUID/privilege gating here; presumably handled by
 * iemCImpl_invpcid -- verify in the CIMPL implementation. */
FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory: reg operand = invalidation type, mem operand = INVPCID descriptor. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit operand size: the invalidation type is read as 64 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: the invalidation type is read as 32 bits. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): only reached for the (invalid) register encoding,
       assuming IEM_MC_CALL_CIMPL_3 returns -- confirm against IEM_MC macros. */
    Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1275
1276
1277/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1278/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1279/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1280/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1281/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1282/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1283/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1284/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1285/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1286/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1287/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1288/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1289/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1290
1291/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1292/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1293/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1294/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1295/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1296/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1297/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1298/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1299/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1300/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1301/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1302/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1303/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1304/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1305/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1306/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1307
1308/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1309/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1310/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1311/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1312/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1313/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1314/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1315/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1316/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1317/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1318/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1319/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1320/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1321/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1322/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1323/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1324
1325/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1326/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1327/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1328/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1329/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1330/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1331/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1332/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1333/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1334/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1335/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1336/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1337/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1338/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1339/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1340/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1341
1342/* Opcode 0x0f 0x38 0xc0 - invalid. */
1343/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1344/* Opcode 0x0f 0x38 0xc1 - invalid. */
1345/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1346/* Opcode 0x0f 0x38 0xc2 - invalid. */
1347/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1348/* Opcode 0x0f 0x38 0xc3 - invalid. */
1349/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1350/* Opcode 0x0f 0x38 0xc4 - invalid. */
1351/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1352/* Opcode 0x0f 0x38 0xc5 - invalid. */
1353/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1354/* Opcode 0x0f 0x38 0xc6 - invalid. */
1355/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1356/* Opcode 0x0f 0x38 0xc7 - invalid. */
1357/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
/** Opcode 0x0f 0x38 0xc8 - SHA1NEXTE (SHA extensions; not yet implemented, stub raises). */
FNIEMOP_STUB(iemOp_sha1nexte_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */
/** Opcode 0x0f 0x38 0xc9 - SHA1MSG1 (SHA extensions; not yet implemented). */
FNIEMOP_STUB(iemOp_sha1msg1_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */
/** Opcode 0x0f 0x38 0xca - SHA1MSG2 (SHA extensions; not yet implemented). */
FNIEMOP_STUB(iemOp_sha1msg2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
/** Opcode 0x0f 0x38 0xcb - SHA256RNDS2 (SHA extensions; not yet implemented). */
FNIEMOP_STUB(iemOp_sha256rnds2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
/** Opcode 0x0f 0x38 0xcc - SHA256MSG1 (SHA extensions; not yet implemented). */
FNIEMOP_STUB(iemOp_sha256msg1_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */
/** Opcode 0x0f 0x38 0xcd - SHA256MSG2 (SHA extensions; not yet implemented). */
FNIEMOP_STUB(iemOp_sha256msg2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1376/* Opcode 0x0f 0x38 0xce - invalid. */
1377/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1378/* Opcode 0x0f 0x38 0xcf - invalid. */
1379/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1380
1381/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1382/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1383/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1384/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1385/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1386/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1387/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1388/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1389/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1390/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1391/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
/** Opcode 0x66 0x0f 0x38 0xdb - AESIMC (AES-NI; not yet implemented, stub raises). */
FNIEMOP_STUB(iemOp_aesimc_Vdq_Wdq);
/** Opcode 0x66 0x0f 0x38 0xdc - AESENC (AES-NI; not yet implemented). */
FNIEMOP_STUB(iemOp_aesenc_Vdq_Wdq);
/** Opcode 0x66 0x0f 0x38 0xdd - AESENCLAST (AES-NI; not yet implemented). */
FNIEMOP_STUB(iemOp_aesenclast_Vdq_Wdq);
/** Opcode 0x66 0x0f 0x38 0xde - AESDEC (AES-NI; not yet implemented). */
FNIEMOP_STUB(iemOp_aesdec_Vdq_Wdq);
/** Opcode 0x66 0x0f 0x38 0xdf - AESDECLAST (AES-NI; not yet implemented). */
FNIEMOP_STUB(iemOp_aesdeclast_Vdq_Wdq);
1402
1403/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1404/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1405/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1406/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1407/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1408/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1409/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1410/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1411/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1412/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1413/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1414/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1415/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1416/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1417/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1418/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1419
1420
/** Opcode 0x0f 0x38 0xf0 - MOVBE Gy,My (byte-swapping load; not yet implemented, stub raises). */
FNIEMOP_STUB(iemOp_movbe_Gy_My);
/** Opcode 0x66 0x0f 0x38 0xf0 - MOVBE Gw,Mw (16-bit form; not yet implemented). */
FNIEMOP_STUB(iemOp_movbe_Gw_Mw);
1425/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1426
1427
/** Opcode 0xf2 0x0f 0x38 0xf0 - CRC32 Gd,Eb: accumulate CRC-32C over a byte source. */
FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
    /* CRC32 requires SSE4.2 in the guest CPUID; decode as invalid otherwise. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        /* The destination is always a 32-bit result; zero the upper half in 64-bit mode. */
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        /* See above: 32-bit destination, upper half cleared in 64-bit mode. */
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1475
1476
/** Opcode 0x0f 0x38 0xf1 - MOVBE My,Gy (byte-swapping store; not yet implemented, stub raises). */
FNIEMOP_STUB(iemOp_movbe_My_Gy);
/** Opcode 0x66 0x0f 0x38 0xf1 - MOVBE Mw,Gw (16-bit form; not yet implemented). */
FNIEMOP_STUB(iemOp_movbe_Mw_Gw);
1481/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1482
1483
/** Opcode 0xf2 0x0f 0x38 0xf1 - CRC32 Gv,Ev: accumulate CRC-32C over a
 * word/dword/qword source selected by the effective operand size.  The
 * destination accumulator is always a 32-bit register (upper half cleared
 * in 64-bit mode). */
FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
    /* CRC32 requires SSE4.2 in the guest CPUID; decode as invalid otherwise. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* 16-bit source word. */
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* 32-bit source dword. */
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* 64-bit source qword; the accumulator stays 32-bit. */
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* 16-bit source word from memory. */
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* 32-bit source dword from memory. */
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* 64-bit source qword from memory; the accumulator stays 32-bit. */
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1610
1611
1612/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1613/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1614/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1615/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1616
1617/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1618/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1619/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1620/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1621
1622/* Opcode 0x0f 0x38 0xf4 - invalid. */
1623/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
1624/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
1625/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
1626
1627/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
1628/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
1629/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
1630/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
1631
1632/* Opcode 0x0f 0x38 0xf6 - invalid. */
/** Opcode 0x66 0x0f 0x38 0xf6 - ADCX (ADX extension; not yet implemented, stub raises). */
FNIEMOP_STUB(iemOp_adcx_Gy_Ey);
/** Opcode 0xf3 0x0f 0x38 0xf6 - ADOX (ADX extension; not yet implemented). */
FNIEMOP_STUB(iemOp_adox_Gy_Ey);
1637/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
1638
1639/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
1640/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
1641/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
1642/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
1643
1644/* Opcode 0x0f 0x38 0xf8 - invalid. */
1645/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
1646/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
1647/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
1648
1649/* Opcode 0x0f 0x38 0xf9 - invalid. */
1650/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
1651/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
1652/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
1653
1654/* Opcode 0x0f 0x38 0xfa - invalid. */
1655/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
1656/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
1657/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
1658
1659/* Opcode 0x0f 0x38 0xfb - invalid. */
1660/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
1661/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
1662/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
1663
1664/* Opcode 0x0f 0x38 0xfc - invalid. */
1665/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
1666/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
1667/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
1668
1669/* Opcode 0x0f 0x38 0xfd - invalid. */
1670/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
1671/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
1672/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
1673
1674/* Opcode 0x0f 0x38 0xfe - invalid. */
1675/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
1676/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
1677/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
1678
1679/* Opcode 0x0f 0x38 0xff - invalid. */
1680/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
1681/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
1682/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
1683
1684
1685/**
1686 * Three byte opcode map, first two bytes are 0x0f 0x38.
1687 * @sa g_apfnVexMap2
1688 */
1689IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
1690{
1691 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1692 /* 0x00 */ iemOp_pshufb_Pq_Qq, iemOp_pshufb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1693 /* 0x01 */ iemOp_phaddw_Pq_Qq, iemOp_phaddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1694 /* 0x02 */ iemOp_phaddd_Pq_Qq, iemOp_phaddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1695 /* 0x03 */ iemOp_phaddsw_Pq_Qq, iemOp_phaddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1696 /* 0x04 */ iemOp_pmaddubsw_Pq_Qq, iemOp_pmaddubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1697 /* 0x05 */ iemOp_phsubw_Pq_Qq, iemOp_phsubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1698 /* 0x06 */ iemOp_phsubd_Pq_Qq, iemOp_phsubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1699 /* 0x07 */ iemOp_phsubsw_Pq_Qq, iemOp_phsubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1700 /* 0x08 */ iemOp_psignb_Pq_Qq, iemOp_psignb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1701 /* 0x09 */ iemOp_psignw_Pq_Qq, iemOp_psignw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1702 /* 0x0a */ iemOp_psignd_Pq_Qq, iemOp_psignd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1703 /* 0x0b */ iemOp_pmulhrsw_Pq_Qq, iemOp_pmulhrsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1704 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
1705 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
1706 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
1707 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
1708
1709 /* 0x10 */ iemOp_InvalidNeedRM, iemOp_pblendvb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1710 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
1711 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
1712 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
1713 /* 0x14 */ iemOp_InvalidNeedRM, iemOp_blendvps_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1714 /* 0x15 */ iemOp_InvalidNeedRM, iemOp_blendvpd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1715 /* 0x16 */ IEMOP_X4(iemOp_InvalidNeedRM),
1716 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_ptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1717 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
1718 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
1719 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
1720 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
1721 /* 0x1c */ iemOp_pabsb_Pq_Qq, iemOp_pabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1722 /* 0x1d */ iemOp_pabsw_Pq_Qq, iemOp_pabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1723 /* 0x1e */ iemOp_pabsd_Pq_Qq, iemOp_pabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1724 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
1725
1726 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_pmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1727 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_pmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1728 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_pmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1729 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_pmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1730 /* 0x24 */ iemOp_InvalidNeedRM, iemOp_pmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1731 /* 0x25 */ iemOp_InvalidNeedRM, iemOp_pmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1732 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
1733 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
1734 /* 0x28 */ iemOp_InvalidNeedRM, iemOp_pmuldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1735 /* 0x29 */ iemOp_InvalidNeedRM, iemOp_pcmpeqq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1736 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_movntdqa_Vdq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1737 /* 0x2b */ iemOp_InvalidNeedRM, iemOp_packusdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1738 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRM),
1739 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRM),
1740 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRM),
1741 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRM),
1742
1743 /* 0x30 */ iemOp_InvalidNeedRM, iemOp_pmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1744 /* 0x31 */ iemOp_InvalidNeedRM, iemOp_pmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1745 /* 0x32 */ iemOp_InvalidNeedRM, iemOp_pmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1746 /* 0x33 */ iemOp_InvalidNeedRM, iemOp_pmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1747 /* 0x34 */ iemOp_InvalidNeedRM, iemOp_pmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1748 /* 0x35 */ iemOp_InvalidNeedRM, iemOp_pmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1749 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
1750 /* 0x37 */ iemOp_InvalidNeedRM, iemOp_pcmpgtq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1751 /* 0x38 */ iemOp_InvalidNeedRM, iemOp_pminsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1752 /* 0x39 */ iemOp_InvalidNeedRM, iemOp_pminsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1753 /* 0x3a */ iemOp_InvalidNeedRM, iemOp_pminuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1754 /* 0x3b */ iemOp_InvalidNeedRM, iemOp_pminud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1755 /* 0x3c */ iemOp_InvalidNeedRM, iemOp_pmaxsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1756 /* 0x3d */ iemOp_InvalidNeedRM, iemOp_pmaxsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1757 /* 0x3e */ iemOp_InvalidNeedRM, iemOp_pmaxuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1758 /* 0x3f */ iemOp_InvalidNeedRM, iemOp_pmaxud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1759
1760 /* 0x40 */ iemOp_InvalidNeedRM, iemOp_pmulld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1761 /* 0x41 */ iemOp_InvalidNeedRM, iemOp_phminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1762 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
1763 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
1764 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
1765 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
1766 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
1767 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
1768 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
1769 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
1770 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
1771 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
1772 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
1773 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
1774 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
1775 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
1776
1777 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
1778 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
1779 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
1780 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
1781 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
1782 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
1783 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
1784 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
1785 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRM),
1786 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRM),
1787 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRM),
1788 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
1789 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
1790 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
1791 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
1792 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),
1793
1794 /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
1795 /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
1796 /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
1797 /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
1798 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
1799 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
1800 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
1801 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
1802 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
1803 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
1804 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
1805 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
1806 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
1807 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
1808 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
1809 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),
1810
1811 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
1812 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
1813 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
1814 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
1815 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
1816 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
1817 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
1818 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
1819 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
1820 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
1821 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
1822 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
1823 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
1824 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
1825 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
1826 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),
1827
1828 /* 0x80 */ iemOp_InvalidNeedRM, iemOp_invept_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1829 /* 0x81 */ iemOp_InvalidNeedRM, iemOp_invvpid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1830 /* 0x82 */ iemOp_InvalidNeedRM, iemOp_invpcid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1831 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
1832 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
1833 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
1834 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
1835 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
1836 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
1837 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
1838 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
1839 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
1840 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
1841 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
1842 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
1843 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
1844
1845 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
1846 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
1847 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
1848 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
1849 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
1850 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
1851 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
1852 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
1853 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
1854 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
1855 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
1856 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
1857 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
1858 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
1859 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
1860 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
1861
1862 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1863 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1864 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1865 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1866 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1867 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1868 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1869 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1870 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1871 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1872 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
1873 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
1874 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
1875 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
1876 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRM),
1877 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
1878
1879 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1880 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1881 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1882 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1883 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1884 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1885 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1886 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1887 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1888 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1889 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
1890 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
1891 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
1892 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
1893 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
1894 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
1895
1896 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1897 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1898 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1899 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1900 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1901 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1902 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1903 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1904 /* 0xc8 */ iemOp_sha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1905 /* 0xc9 */ iemOp_sha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1906 /* 0xca */ iemOp_sha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1907 /* 0xcb */ iemOp_sha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1908 /* 0xcc */ iemOp_sha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1909 /* 0xcd */ iemOp_sha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1910 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
1911 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
1912
1913 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1914 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1915 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1916 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1917 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1918 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1919 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1920 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1921 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1922 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1923 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
1924 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_aesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1925 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_aesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1926 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_aesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1927 /* 0xde */ iemOp_InvalidNeedRM, iemOp_aesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1928 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_aesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1929
1930 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1931 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1932 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1933 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1934 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1935 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1936 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1937 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1938 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1939 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1940 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
1941 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
1942 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
1943 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
1944 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
1945 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),
1946
1947 /* 0xf0 */ iemOp_movbe_Gy_My, iemOp_movbe_Gw_Mw, iemOp_InvalidNeedRM, iemOp_crc32_Gd_Eb,
1948 /* 0xf1 */ iemOp_movbe_My_Gy, iemOp_movbe_Mw_Gw, iemOp_InvalidNeedRM, iemOp_crc32_Gv_Ev,
1949 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1950 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1951 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1952 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1953 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_adcx_Gy_Ey, iemOp_adox_Gy_Ey, iemOp_InvalidNeedRM,
1954 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1955 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1956 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1957 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
1958 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
1959 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
1960 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
1961 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
1962 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
1963};
/* 256 opcodes (0x00..0xff) x 4 prefix columns (none, 066h, 0f3h, 0f2h) = 1024 entries. */
1964AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024);
1965
1966/** @} */
1967
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
ContactPrivacy policyTerms of Use