VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h @ 96247

Last change on this file since 96247 was 96115, checked in by vboxsync, 3 years ago

VMM/IEM: Implement [v]pmov{s,z}x[bw,bd,bq,wd,wq,dq] instructions, bugref:9898 (review is required)

1/* $Id: IEMAllInstructionsThree0f38.cpp.h 96115 2022-08-08 20:04:00Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Three byte opcodes with first two bytes 0x0f 0x38
23 * @{
24 */
25
26FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported); /* in IEMAllInstructionsTwoByte0f.cpp.h */
27
28
29/**
30 * Common worker for SSSE3 instructions on the forms:
31 * pxxx xmm1, xmm2/mem128
32 *
33 * Proper alignment of the 128-bit operand is enforced.
34 * Exceptions type 4. SSSE3 cpuid checks.
35 *
36 * @sa iemOpCommonSse2_FullFull_To_Full
37 */
38FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
39{
40 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
41 if (IEM_IS_MODRM_REG_MODE(bRm))
42 {
43 /*
44 * Register, register.
45 */
46 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
47 IEM_MC_BEGIN(2, 0);
48 IEM_MC_ARG(PRTUINT128U, puDst, 0);
49 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
50 IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
51 IEM_MC_PREPARE_SSE_USAGE();
52 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
53 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
54 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 }
58 else
59 {
60 /*
61 * Register, memory.
62 */
63 IEM_MC_BEGIN(2, 2);
64 IEM_MC_ARG(PRTUINT128U, puDst, 0);
65 IEM_MC_LOCAL(RTUINT128U, uSrc);
66 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
70 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
71 IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
72 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
73
74 IEM_MC_PREPARE_SSE_USAGE();
75 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
77
78 IEM_MC_ADVANCE_RIP();
79 IEM_MC_END();
80 }
81 return VINF_SUCCESS;
82}
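
/*
 * Note on the IEM_MC pattern used by the worker above and its SSE4.1/SSE4.2
 * siblings below: the ModR/M byte is decoded first, the CPUID/CR0/CR4 related
 * exceptions are raised if the feature is unavailable, the memory form fetches
 * its operand with 16-byte alignment enforced (IEM_MC_FETCH_MEM_U128_ALIGN_SSE,
 * i.e. the "exceptions type 4" mentioned in the comments), and the SIMD worker
 * is then called on the two 128-bit operands.
 */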
83
84
85/**
86 * Common worker for SSE4.1 instructions on the forms:
87 * pxxx xmm1, xmm2/mem128
88 *
89 * Proper alignment of the 128-bit operand is enforced.
90 * Exceptions type 4. SSE4.1 cpuid checks.
91 *
92 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
93 * iemOpCommonSse42_FullFull_To_Full
94 */
95FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
96{
97 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
98 if (IEM_IS_MODRM_REG_MODE(bRm))
99 {
100 /*
101 * Register, register.
102 */
103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
104 IEM_MC_BEGIN(2, 0);
105 IEM_MC_ARG(PRTUINT128U, puDst, 0);
106 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
107 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
108 IEM_MC_PREPARE_SSE_USAGE();
109 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
110 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
111 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
112 IEM_MC_ADVANCE_RIP();
113 IEM_MC_END();
114 }
115 else
116 {
117 /*
118 * Register, memory.
119 */
120 IEM_MC_BEGIN(2, 2);
121 IEM_MC_ARG(PRTUINT128U, puDst, 0);
122 IEM_MC_LOCAL(RTUINT128U, uSrc);
123 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125
126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
128 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
129 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
130
131 IEM_MC_PREPARE_SSE_USAGE();
132 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
133 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
134
135 IEM_MC_ADVANCE_RIP();
136 IEM_MC_END();
137 }
138 return VINF_SUCCESS;
139}
140
141
142/**
143 * Common worker for SSE4.1 instructions on the forms:
144 * pxxx xmm1, xmm2/mem128
145 *
146 * Proper alignment of the 128-bit operand is enforced.
147 * Exceptions type 4. SSE4.1 cpuid checks.
148 *
149 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
150 * takes no FXSAVE state, just the operands.
151 *
152 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
153 * iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
154 */
155FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
156{
157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
158 if (IEM_IS_MODRM_REG_MODE(bRm))
159 {
160 /*
161 * Register, register.
162 */
163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
164 IEM_MC_BEGIN(2, 0);
165 IEM_MC_ARG(PRTUINT128U, puDst, 0);
166 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
167 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
168 IEM_MC_PREPARE_SSE_USAGE();
169 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
170 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
171 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 /*
178 * Register, memory.
179 */
180 IEM_MC_BEGIN(2, 2);
181 IEM_MC_ARG(PRTUINT128U, puDst, 0);
182 IEM_MC_LOCAL(RTUINT128U, uSrc);
183 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
185
186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
188 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
189 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
190
191 IEM_MC_PREPARE_SSE_USAGE();
192 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
193 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
194
195 IEM_MC_ADVANCE_RIP();
196 IEM_MC_END();
197 }
198 return VINF_SUCCESS;
199}
200
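/*
 * Note: iemOpCommonSse41Opt_FullFull_To_Full above differs from the other
 * common workers in this file only in the worker function type: it takes a
 * PFNIEMAIMPLMEDIAOPTF2U128 and calls it via IEM_MC_CALL_VOID_AIMPL_2 with
 * just the two operand pointers, whereas the PFNIEMAIMPLMEDIAF2U128 workers
 * are dispatched via IEM_MC_CALL_SSE_AIMPL_2 and also get the FXSAVE state.
 */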
201
202/**
203 * Common worker for SSE4.2 instructions on the forms:
204 * pxxx xmm1, xmm2/mem128
205 *
206 * Proper alignment of the 128-bit operand is enforced.
207 * Exceptions type 4. SSE4.2 cpuid checks.
208 *
209 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
210 * iemOpCommonSse41_FullFull_To_Full
211 */
212FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
213{
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
215 if (IEM_IS_MODRM_REG_MODE(bRm))
216 {
217 /*
218 * Register, register.
219 */
220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
221 IEM_MC_BEGIN(2, 0);
222 IEM_MC_ARG(PRTUINT128U, puDst, 0);
223 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
224 IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
225 IEM_MC_PREPARE_SSE_USAGE();
226 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
227 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
228 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
229 IEM_MC_ADVANCE_RIP();
230 IEM_MC_END();
231 }
232 else
233 {
234 /*
235 * Register, memory.
236 */
237 IEM_MC_BEGIN(2, 2);
238 IEM_MC_ARG(PRTUINT128U, puDst, 0);
239 IEM_MC_LOCAL(RTUINT128U, uSrc);
240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
242
243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
245 IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
246 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
247
248 IEM_MC_PREPARE_SSE_USAGE();
249 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
250 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
251
252 IEM_MC_ADVANCE_RIP();
253 IEM_MC_END();
254 }
255 return VINF_SUCCESS;
256}
257
258
259/** Opcode 0x0f 0x38 0x00. */
260FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
261{
262 IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
263 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
264 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64,&iemAImpl_pshufb_u64_fallback),
265 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
266}
267
268
269/** Opcode 0x66 0x0f 0x38 0x00. */
270FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
271{
272 IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
273 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
274 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));
275
276}
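
/*
 * Operation recap for PSHUFB (implemented by the iemAImpl_pshufb_u* workers):
 * each destination byte i is cleared when bit 7 of source byte i is set,
 * otherwise it is loaded from the original destination byte selected by the
 * low 3 (MMX) or 4 (XMM) bits of source byte i.
 */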
277
278
279/* Opcode 0x0f 0x38 0x01. */
280FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
281{
282 IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
283 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
284 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64,&iemAImpl_phaddw_u64_fallback),
285 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
286}
287
288
289/** Opcode 0x66 0x0f 0x38 0x01. */
290FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
291{
292 IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
293 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
294 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));
295
296}
297
298
299/** Opcode 0x0f 0x38 0x02. */
300FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
301{
302 IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
303 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
304 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64,&iemAImpl_phaddd_u64_fallback),
305 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
306}
307
308
309/** Opcode 0x66 0x0f 0x38 0x02. */
310FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
311{
312 IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
313 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
314 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));
315
316}
317
318
319/** Opcode 0x0f 0x38 0x03. */
320FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
321{
322 IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
323 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
324 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64,&iemAImpl_phaddsw_u64_fallback),
325 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
326}
327
328
329/** Opcode 0x66 0x0f 0x38 0x03. */
330FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
331{
332 IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
333 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
334 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));
335
336}
337
338
339/** Opcode 0x0f 0x38 0x04. */
340FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
341{
342 IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
343 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
344 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback),
345 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
346}
347
348
349/** Opcode 0x66 0x0f 0x38 0x04. */
350FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
351{
352 IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
353 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
354 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));
355
356}
357
358
359/** Opcode 0x0f 0x38 0x05. */
360FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
361{
362 IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
363 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
364 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64,&iemAImpl_phsubw_u64_fallback),
365 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
366}
367
368
369/** Opcode 0x66 0x0f 0x38 0x05. */
370FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
371{
372 IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
373 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
374 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));
375
376}
377
378
379/** Opcode 0x0f 0x38 0x06. */
380FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
381{
382 IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
383 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
384 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64,&iemAImpl_phsubd_u64_fallback),
385 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
386}
387
388
389
390/** Opcode 0x66 0x0f 0x38 0x06. */
391FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
392{
393 IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
394 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
395 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));
396
397}
398
399
400/** Opcode 0x0f 0x38 0x07. */
401FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
402{
403 IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
404 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
405 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64,&iemAImpl_phsubsw_u64_fallback),
406 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
407}
408
409
410/** Opcode 0x66 0x0f 0x38 0x07. */
411FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
412{
413 IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
414 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
415 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));
416
417}
418
419
420/** Opcode 0x0f 0x38 0x08. */
421FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
422{
423 IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
424 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
425 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
426 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
427}
428
429
430/** Opcode 0x66 0x0f 0x38 0x08. */
431FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
432{
433 IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
434 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
435 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));
436
437}
438
439
440/** Opcode 0x0f 0x38 0x09. */
441FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
442{
443 IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
444 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
445 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
446 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
447}
448
449
450/** Opcode 0x66 0x0f 0x38 0x09. */
451FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
452{
453 IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
454 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
455 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));
456
457}
458
459
460/** Opcode 0x0f 0x38 0x0a. */
461FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
462{
463 IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
464 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
465 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
466 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
467}
468
469
470/** Opcode 0x66 0x0f 0x38 0x0a. */
471FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
472{
473 IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
474 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
475 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));
476
477}
478
479
480/** Opcode 0x0f 0x38 0x0b. */
481FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
482{
483 IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
484 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
485 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback),
486 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
487}
488
489
490/** Opcode 0x66 0x0f 0x38 0x0b. */
491FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
492{
493 IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
494 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
495 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));
496
497}
498
499
500/* Opcode 0x0f 0x38 0x0c - invalid. */
501/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
502/* Opcode 0x0f 0x38 0x0d - invalid. */
503/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
504/* Opcode 0x0f 0x38 0x0e - invalid. */
505/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
506/* Opcode 0x0f 0x38 0x0f - invalid. */
507/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
508
509
510/* Opcode 0x0f 0x38 0x10 - invalid */
511/** Opcode 0x66 0x0f 0x38 0x10 (legacy only). */
512FNIEMOP_STUB(iemOp_pblendvb_Vdq_Wdq);
513/* Opcode 0x0f 0x38 0x11 - invalid */
514/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
515/* Opcode 0x0f 0x38 0x12 - invalid */
516/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
517/* Opcode 0x0f 0x38 0x13 - invalid */
518/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
519/* Opcode 0x0f 0x38 0x14 - invalid */
520/** Opcode 0x66 0x0f 0x38 0x14 (legacy only). */
521FNIEMOP_STUB(iemOp_blendvps_Vdq_Wdq);
522/* Opcode 0x0f 0x38 0x15 - invalid */
523/** Opcode 0x66 0x0f 0x38 0x15 (legacy only). */
524FNIEMOP_STUB(iemOp_blendvpd_Vdq_Wdq);
525/* Opcode 0x0f 0x38 0x16 - invalid */
526/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
527/* Opcode 0x0f 0x38 0x17 - invalid */
528
529
530/** Opcode 0x66 0x0f 0x38 0x17. */
531FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
532{
533 IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
535 if (IEM_IS_MODRM_REG_MODE(bRm))
536 {
537 /*
538 * Register, register.
539 */
540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
541 IEM_MC_BEGIN(3, 0);
542 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
543 IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
545 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
546 IEM_MC_PREPARE_SSE_USAGE();
547 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
548 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
549 IEM_MC_REF_EFLAGS(pEFlags);
550 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
551 IEM_MC_ADVANCE_RIP();
552 IEM_MC_END();
553 }
554 else
555 {
556 /*
557 * Register, memory.
558 */
559 IEM_MC_BEGIN(3, 2);
560 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
561 IEM_MC_LOCAL(RTUINT128U, uSrc2);
562 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
563 IEM_MC_ARG(uint32_t *, pEFlags, 2);
564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
565
566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
568 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
569 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
570
571 IEM_MC_PREPARE_SSE_USAGE();
572 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
573 IEM_MC_REF_EFLAGS(pEFlags);
574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
575
576 IEM_MC_ADVANCE_RIP();
577 IEM_MC_END();
578 }
579 return VINF_SUCCESS;
580}
581
582
583/* Opcode 0x0f 0x38 0x18 - invalid */
584/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
585/* Opcode 0x0f 0x38 0x19 - invalid */
586/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
587/* Opcode 0x0f 0x38 0x1a - invalid */
588/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
589/* Opcode 0x0f 0x38 0x1b - invalid */
590/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
591
592
593/** Opcode 0x0f 0x38 0x1c. */
594FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
595{
596 IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
597 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
598 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
599 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
600}
601
602
603/** Opcode 0x66 0x0f 0x38 0x1c. */
604FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
605{
606 IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
607 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
608 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));
609
610}
611
612
613/** Opcode 0x0f 0x38 0x1d. */
614FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
615{
616 IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
617 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
618 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
619 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
620}
621
622
623/** Opcode 0x66 0x0f 0x38 0x1d. */
624FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
625{
626 IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
627 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
628 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));
629
630}
631
632
633/** Opcode 0x0f 0x38 0x1e. */
634FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
635{
636 IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
637 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
638 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
639 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
640}
641
642
643/** Opcode 0x66 0x0f 0x38 0x1e. */
644FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
645{
646 IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
647 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
648 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));
649
650}
651
652
653/* Opcode 0x0f 0x38 0x1f - invalid */
654/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
655
656
657/** Body for the pmov{s,z}x* instructions. */
658#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
660 if (IEM_IS_MODRM_REG_MODE(bRm)) \
661 { \
662 /* \
663 * Register, register. \
664 */ \
665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
666 IEM_MC_BEGIN(2, 0); \
667 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
668 IEM_MC_ARG(uint64_t, uSrc, 1); \
669 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
670 IEM_MC_PREPARE_SSE_USAGE(); \
671 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
672 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
673 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
674 iemAImpl_ ## a_Instr ## _u128, \
675 iemAImpl_v ## a_Instr ## _u128_fallback), \
676 puDst, uSrc); \
677 IEM_MC_ADVANCE_RIP(); \
678 IEM_MC_END(); \
679 } \
680 else \
681 { \
682 /* \
683 * Register, memory. \
684 */ \
685 IEM_MC_BEGIN(2, 2); \
686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
687 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
688 IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
691 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
692 IEM_MC_PREPARE_SSE_USAGE(); \
693 IEM_MC_FETCH_MEM_U## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
694 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
695 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
696 iemAImpl_ ## a_Instr ## _u128, \
697 iemAImpl_v ## a_Instr ## _u128_fallback), \
698 puDst, uSrc); \
699 IEM_MC_ADVANCE_RIP(); \
700 IEM_MC_END(); \
701 } \
702 return VINF_SUCCESS
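
/*
 * Notes on IEMOP_BODY_PMOV_S_Z: @a a_Instr is the lower case instruction name
 * used to form the iemAImpl_<instr>_u128 / iemAImpl_v<instr>_u128_fallback
 * worker names, and @a a_SrcWidth is the width in bits of the memory source
 * operand.  The register form always fetches the low 64 bits of the source
 * XMM register (IEM_MC_FETCH_XREG_U64) and lets the worker pick out the bytes
 * it actually widens; e.g. IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64) below expands to
 * the body of the pmovsxbw handler.
 */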
703
704
705/** Opcode 0x66 0x0f 0x38 0x20. */
706FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
707{
708 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
709 IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
710 IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
711}
712
713
714/** Opcode 0x66 0x0f 0x38 0x21. */
715FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
716{
717 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
718 IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
719 IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
720}
721
722
723/** Opcode 0x66 0x0f 0x38 0x22. */
724FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
725{
726 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
727 IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
728 IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
729}
730
731
732/** Opcode 0x66 0x0f 0x38 0x23. */
733FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
734{
735 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
736 IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
737 IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
738}
739
740
741/** Opcode 0x66 0x0f 0x38 0x24. */
742FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
743{
744 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
745 IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
746 IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
747}
748
749
750/** Opcode 0x66 0x0f 0x38 0x25. */
751FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
752{
753 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
754 IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
755 IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
756}
757
758
759/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
760/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
761
762
763/** Opcode 0x66 0x0f 0x38 0x28. */
764FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
765{
766 IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
767 return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
768 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
769}
770
771
772/** Opcode 0x66 0x0f 0x38 0x29. */
773FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
774{
775 IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
776 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
777 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
778}
779
780
781/**
782 * @opcode 0x2a
783 * @opcodesub !11 mr/reg
784 * @oppfx 0x66
785 * @opcpuid sse4.1
786 * @opgroup og_sse41_cachect
787 * @opxcpttype 1
788 * @optest op1=-1 op2=2 -> op1=2
789 * @optest op1=0 op2=-42 -> op1=-42
790 */
791FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
792{
793 IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
795 if (IEM_IS_MODRM_MEM_MODE(bRm))
796 {
797 /* Register, memory. */
798 IEM_MC_BEGIN(0, 2);
799 IEM_MC_LOCAL(RTUINT128U, uSrc);
800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
801
802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
804 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
806
807 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
808 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
809
810 IEM_MC_ADVANCE_RIP();
811 IEM_MC_END();
812 return VINF_SUCCESS;
813 }
814
815 /**
816 * @opdone
817 * @opmnemonic ud660f382areg
818 * @opcode 0x2a
819 * @opcodesub 11 mr/reg
820 * @oppfx 0x66
821 * @opunused immediate
822 * @opcpuid sse
823 * @optest ->
824 */
825 return IEMOP_RAISE_INVALID_OPCODE();
826}
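
/*
 * Note: MOVNTDQA is the non-temporal (streaming) aligned 128-bit load; the
 * hint aspect is not modelled here, the emulation simply performs an ordinary
 * aligned load and stores the result in the destination XMM register.  As the
 * fall through above shows, the register-form encoding of 66 0F 38 2A raises #UD.
 */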
827
828
829/** Opcode 0x66 0x0f 0x38 0x2b. */
830FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
831{
832 IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
833 return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
834}
835
836
837/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
838/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
839/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
840/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
841
842/** Opcode 0x66 0x0f 0x38 0x30. */
843FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
844{
845 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
846 IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
847 IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64);
848}
849
850
851/** Opcode 0x66 0x0f 0x38 0x31. */
852FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
853{
854 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
855 IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
856 IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
857}
858
859
860/** Opcode 0x66 0x0f 0x38 0x32. */
861FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
862{
863 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
864 IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
865 IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
866}
867
868
869/** Opcode 0x66 0x0f 0x38 0x33. */
870FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
871{
872 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
873 IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
874 IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
875}
876
877
878/** Opcode 0x66 0x0f 0x38 0x34. */
879FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
880{
881 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
882 IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
883 IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
884}
885
886
887/** Opcode 0x66 0x0f 0x38 0x35. */
888FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
889{
890 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
891 IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
892 IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
893}
894
895
896/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
897
898
899/** Opcode 0x66 0x0f 0x38 0x37. */
900FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
901{
902 IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
903 return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
904 IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
905}
906
907
908/** Opcode 0x66 0x0f 0x38 0x38. */
909FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
910{
911 IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
912 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
913 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
914}
915
916
917/** Opcode 0x66 0x0f 0x38 0x39. */
918FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
919{
920 IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
921 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
922 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
923}
924
925
926/** Opcode 0x66 0x0f 0x38 0x3a. */
927FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
928{
929 IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
930 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
931 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
932}
933
934
935/** Opcode 0x66 0x0f 0x38 0x3b. */
936FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
937{
938 IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
939 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
940 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
941}
942
943
944/** Opcode 0x66 0x0f 0x38 0x3c. */
945FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
946{
947 IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
948 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
949 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
950}
951
952
953/** Opcode 0x66 0x0f 0x38 0x3d. */
954FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
955{
956 IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
957 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
958 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
959}
960
961
962/** Opcode 0x66 0x0f 0x38 0x3e. */
963FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
964{
965 IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
966 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
967 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
968}
969
970
971/** Opcode 0x66 0x0f 0x38 0x3f. */
972FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
973{
974 IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
975 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
976 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
977}
978
979
980/** Opcode 0x66 0x0f 0x38 0x40. */
981FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
982{
983 IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
984 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
985 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
986}
987
988
989/** Opcode 0x66 0x0f 0x38 0x41. */
990FNIEMOP_STUB(iemOp_phminposuw_Vdq_Wdq);
991/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
992/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
993/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
994/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
995/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
996/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
997/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
998/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
999/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
1000/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
1001/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
1002/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
1003/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
1004/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
1005
1006/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
1007/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
1008/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
1009/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
1010/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
1011/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
1012/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
1013/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
1014/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
1015/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
1016/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
1017/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
1018/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
1019/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
1020/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
1021/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
1022
1023/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
1024/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
1025/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
1026/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
1027/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
1028/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
1029/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
1030/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
1031/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
1032/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
1033/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
1034/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
1035/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
1036/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
1037/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
1038/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
1039
1040/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
1041/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
1042/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
1043/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
1044/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
1045/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
1046/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
1047/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
1048/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
1049/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
1050/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
1051/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
1052/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
1053/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
1054/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
1055/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
1056
1057/** Opcode 0x66 0x0f 0x38 0x80. */
1058#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
1059FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
1060{
1061 IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
1064 IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
1065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1066 if (IEM_IS_MODRM_MEM_MODE(bRm))
1067 {
1068 /* Register, memory. */
1069 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1070 {
1071 IEM_MC_BEGIN(3, 0);
1072 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1073 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
1074 IEM_MC_ARG(uint64_t, uInveptType, 2);
1075 IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
1076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
1077 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1078 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
1079 IEM_MC_END();
1080 }
1081 else
1082 {
1083 IEM_MC_BEGIN(3, 0);
1084 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1085 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
1086 IEM_MC_ARG(uint32_t, uInveptType, 2);
1087 IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
1088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
1089 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1090 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
1091 IEM_MC_END();
1092 }
1093 }
1094 Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
1095 return IEMOP_RAISE_INVALID_OPCODE();
1096}
1097#else
1098FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
1099#endif
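
/*
 * Note: invept above and invvpid/invpcid below share the same operand shape:
 * the general purpose register operand (Gy) supplies the invalidation type
 * and the mandatory 128-bit memory operand (Mdq) points to the descriptor;
 * a register ModR/M encoding is undefined and ends up at
 * IEMOP_RAISE_INVALID_OPCODE().
 */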
1100
1101/** Opcode 0x66 0x0f 0x38 0x81. */
1102#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1103FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
1104{
1105 IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
1106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1107 IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
1108 IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
1109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1110 if (IEM_IS_MODRM_MEM_MODE(bRm))
1111 {
1112 /* Register, memory. */
1113 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1114 {
1115 IEM_MC_BEGIN(3, 0);
1116 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1117 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
1118 IEM_MC_ARG(uint64_t, uInvvpidType, 2);
1119 IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
1121 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1122 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
1123 IEM_MC_END();
1124 }
1125 else
1126 {
1127 IEM_MC_BEGIN(3, 0);
1128 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1129 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
1130 IEM_MC_ARG(uint32_t, uInvvpidType, 2);
1131 IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
1133 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1134 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
1135 IEM_MC_END();
1136 }
1137 }
1138 Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
1139 return IEMOP_RAISE_INVALID_OPCODE();
1140}
1141#else
1142FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
1143#endif
1144
1145/** Opcode 0x66 0x0f 0x38 0x82. */
1146FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
1147{
1148 IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
1149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1151 if (IEM_IS_MODRM_MEM_MODE(bRm))
1152 {
1153 /* Register, memory. */
1154 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1155 {
1156 IEM_MC_BEGIN(3, 0);
1157 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1158 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
1159 IEM_MC_ARG(uint64_t, uInvpcidType, 2);
1160 IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
1162 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1163 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
1164 IEM_MC_END();
1165 }
1166 else
1167 {
1168 IEM_MC_BEGIN(3, 0);
1169 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1170 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
1171 IEM_MC_ARG(uint32_t, uInvpcidType, 2);
1172 IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
1174 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1175 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
1176 IEM_MC_END();
1177 }
1178 }
1179 Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
1180 return IEMOP_RAISE_INVALID_OPCODE();
1181}
1182
1183
1184/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1185/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1186/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1187/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1188/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1189/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1190/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1191/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1192/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1193/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1194/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1195/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1196/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1197
1198/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1199/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1200/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1201/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1202/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1203/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1204/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1205/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1206/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1207/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1208/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1209/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1210/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1211/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1212/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1213/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1214
1215/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1216/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1217/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1218/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1219/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1220/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1221/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1222/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1223/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1224/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1225/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1226/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1227/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1228/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1229/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1230/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1231
1232/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1233/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1234/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1235/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1236/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1237/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1238/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1239/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1240/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1241/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1242/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1243/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1244/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1245/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1246/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1247/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1248
1249/* Opcode 0x0f 0x38 0xc0 - invalid. */
1250/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1251/* Opcode 0x0f 0x38 0xc1 - invalid. */
1252/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1253/* Opcode 0x0f 0x38 0xc2 - invalid. */
1254/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1255/* Opcode 0x0f 0x38 0xc3 - invalid. */
1256/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1257/* Opcode 0x0f 0x38 0xc4 - invalid. */
1258/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1259/* Opcode 0x0f 0x38 0xc5 - invalid. */
1260/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1261/* Opcode 0x0f 0x38 0xc6 - invalid. */
1262/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1263/* Opcode 0x0f 0x38 0xc7 - invalid. */
1264/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
1265/** Opcode 0x0f 0x38 0xc8. */
1266FNIEMOP_STUB(iemOp_sha1nexte_Vdq_Wdq);
1267/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */
1268/** Opcode 0x0f 0x38 0xc9. */
1269FNIEMOP_STUB(iemOp_sha1msg1_Vdq_Wdq);
1270/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */
1271/** Opcode 0x0f 0x38 0xca. */
1272FNIEMOP_STUB(iemOp_sha1msg2_Vdq_Wdq);
1273/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
1274/** Opcode 0x0f 0x38 0xcb. */
1275FNIEMOP_STUB(iemOp_sha256rnds2_Vdq_Wdq);
1276/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
1277/** Opcode 0x0f 0x38 0xcc. */
1278FNIEMOP_STUB(iemOp_sha256msg1_Vdq_Wdq);
1279/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */
1280/** Opcode 0x0f 0x38 0xcd. */
1281FNIEMOP_STUB(iemOp_sha256msg2_Vdq_Wdq);
1282/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1283/* Opcode 0x0f 0x38 0xce - invalid. */
1284/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1285/* Opcode 0x0f 0x38 0xcf - invalid. */
1286/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1287
1288/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1289/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1290/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1291/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1292/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1293/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1294/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1295/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1296/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1297/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1298/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
1299/** Opcode 0x66 0x0f 0x38 0xdb. */
1300FNIEMOP_STUB(iemOp_aesimc_Vdq_Wdq);
1301/** Opcode 0x66 0x0f 0x38 0xdc. */
1302FNIEMOP_STUB(iemOp_aesenc_Vdq_Wdq);
1303/** Opcode 0x66 0x0f 0x38 0xdd. */
1304FNIEMOP_STUB(iemOp_aesenclast_Vdq_Wdq);
1305/** Opcode 0x66 0x0f 0x38 0xde. */
1306FNIEMOP_STUB(iemOp_aesdec_Vdq_Wdq);
1307/** Opcode 0x66 0x0f 0x38 0xdf. */
1308FNIEMOP_STUB(iemOp_aesdeclast_Vdq_Wdq);
1309
1310/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1311/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1312/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1313/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1314/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1315/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1316/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1317/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1318/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1319/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1320/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1321/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1322/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1323/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1324/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1325/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1326
1327
1328/** Opcode 0x0f 0x38 0xf0. */
1329FNIEMOP_STUB(iemOp_movbe_Gy_My);
1330/** Opcode 0x66 0x0f 0x38 0xf0. */
1331FNIEMOP_STUB(iemOp_movbe_Gw_Mw);
1332/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1333
1334
1335/** Opcode 0xf2 0x0f 0x38 0xf0. */
1336FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
1337{
1338 IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
1339 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
1340 return iemOp_InvalidNeedRM(pVCpu);
1341
1342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1343 if (IEM_IS_MODRM_REG_MODE(bRm))
1344 {
1345 /*
1346 * Register, register.
1347 */
1348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1349 IEM_MC_BEGIN(2, 0);
1350 IEM_MC_ARG(uint32_t *, puDst, 0);
1351 IEM_MC_ARG(uint8_t, uSrc, 1);
1352 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1353 IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1354 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
1355 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1356 IEM_MC_ADVANCE_RIP();
1357 IEM_MC_END();
1358 }
1359 else
1360 {
1361 /*
1362 * Register, memory.
1363 */
1364 IEM_MC_BEGIN(2, 1);
1365 IEM_MC_ARG(uint32_t *, puDst, 0);
1366 IEM_MC_ARG(uint8_t, uSrc, 1);
1367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1368
1369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1371 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1372
1373 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1374 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
1375 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1376
1377 IEM_MC_ADVANCE_RIP();
1378 IEM_MC_END();
1379 }
1380 return VINF_SUCCESS;
1381}
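
/*
 * Reference note: the SSE4.2 CRC32 instruction accumulates the Castagnoli
 * CRC-32C (polynomial 0x1EDC6F41, bit-reflected constant 0x82F63B78) over the
 * source operand, using the destination register as the running value and
 * applying no initial or final inversion.  A minimal bitwise sketch of the
 * per-byte update the iemAImpl_crc32_u8 worker is expected to match (for
 * illustration only, not the actual worker):
 *
 *     static uint32_t Crc32CByte(uint32_t uCrc, uint8_t bSrc)
 *     {
 *         uCrc ^= bSrc;
 *         for (unsigned i = 0; i < 8; i++)
 *             uCrc = (uCrc >> 1) ^ (0x82F63B78 & (0U - (uCrc & 1)));
 *         return uCrc;
 *     }
 */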
1382
1383
1384/** Opcode 0x0f 0x38 0xf1. */
1385FNIEMOP_STUB(iemOp_movbe_My_Gy);
1386/** Opcode 0x66 0x0f 0x38 0xf1. */
1387FNIEMOP_STUB(iemOp_movbe_Mw_Gw);
1388/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1389
1390
1391/** Opcode 0xf2 0x0f 0x38 0xf1. */
1392FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
1393{
1394 IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
1395 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
1396 return iemOp_InvalidNeedRM(pVCpu);
1397
1398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1399 if (IEM_IS_MODRM_REG_MODE(bRm))
1400 {
1401 /*
1402 * Register, register.
1403 */
1404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1405 switch (pVCpu->iem.s.enmEffOpSize)
1406 {
1407 case IEMMODE_16BIT:
1408 IEM_MC_BEGIN(2, 0);
1409 IEM_MC_ARG(uint32_t *, puDst, 0);
1410 IEM_MC_ARG(uint16_t, uSrc, 1);
1411 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1412 IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1413 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
1414 puDst, uSrc);
1415 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1416 IEM_MC_ADVANCE_RIP();
1417 IEM_MC_END();
1418 return VINF_SUCCESS;
1419
1420 case IEMMODE_32BIT:
1421 IEM_MC_BEGIN(2, 0);
1422 IEM_MC_ARG(uint32_t *, puDst, 0);
1423 IEM_MC_ARG(uint32_t, uSrc, 1);
1424 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1425 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1426 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
1427 puDst, uSrc);
1428 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1429 IEM_MC_ADVANCE_RIP();
1430 IEM_MC_END();
1431 return VINF_SUCCESS;
1432
1433 case IEMMODE_64BIT:
1434 IEM_MC_BEGIN(2, 0);
1435 IEM_MC_ARG(uint32_t *, puDst, 0);
1436 IEM_MC_ARG(uint64_t, uSrc, 1);
1437 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1438 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1439 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
1440 puDst, uSrc);
1441 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1442 IEM_MC_ADVANCE_RIP();
1443 IEM_MC_END();
1444 return VINF_SUCCESS;
1445
1446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1447 }
1448 }
1449 else
1450 {
1451 /*
1452 * Register, memory.
1453 */
1454 switch (pVCpu->iem.s.enmEffOpSize)
1455 {
1456 case IEMMODE_16BIT:
1457 IEM_MC_BEGIN(2, 1);
1458 IEM_MC_ARG(uint32_t *, puDst, 0);
1459 IEM_MC_ARG(uint16_t, uSrc, 1);
1460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1461
1462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1464 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1465
1466 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1467 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
1468 puDst, uSrc);
1469 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1470
1471 IEM_MC_ADVANCE_RIP();
1472 IEM_MC_END();
1473 return VINF_SUCCESS;
1474
1475 case IEMMODE_32BIT:
1476 IEM_MC_BEGIN(2, 1);
1477 IEM_MC_ARG(uint32_t *, puDst, 0);
1478 IEM_MC_ARG(uint32_t, uSrc, 1);
1479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1480
1481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1483 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1484
1485 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1486 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
1487 puDst, uSrc);
1488 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1489
1490 IEM_MC_ADVANCE_RIP();
1491 IEM_MC_END();
1492 return VINF_SUCCESS;
1493
1494 case IEMMODE_64BIT:
1495 IEM_MC_BEGIN(2, 1);
1496 IEM_MC_ARG(uint32_t *, puDst, 0);
1497 IEM_MC_ARG(uint64_t, uSrc, 1);
1498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1499
1500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1502 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1503
1504 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1505 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
1506 puDst, uSrc);
1507 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 return VINF_SUCCESS;
1512
1513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1514 }
1515 }
1516}
1517
1518
1519/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1520/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1521/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1522/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1523
1524/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1525/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1526/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1527/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1528
1529/* Opcode 0x0f 0x38 0xf4 - invalid. */
1530/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
1531/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
1532/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
1533
1534/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
1535/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
1536/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
1537/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
1538
1539/* Opcode 0x0f 0x38 0xf6 - invalid. */
1540/** Opcode 0x66 0x0f 0x38 0xf6. */
1541FNIEMOP_STUB(iemOp_adcx_Gy_Ey);
1542/** Opcode 0xf3 0x0f 0x38 0xf6. */
1543FNIEMOP_STUB(iemOp_adox_Gy_Ey);
1544/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
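/*
 * The adcx/adox handlers above are still stubs.  For orientation, the fragment below
 * is a rough, hypothetical sketch of the 64-bit ADCX arithmetic: an unsigned add with
 * the carry-in taken from CF and only CF written back (ADOX behaves identically but
 * uses OF).  It is not the eventual IEM implementation; the helper name is made up.
 */
#if 0 /* illustrative sketch only, not built */
static void iemAdcxU64Sketch(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags)
{
    uint64_t const uCarryIn = (*pfEFlags & X86_EFL_CF) ? 1 : 0;
    uint64_t const uResult  = *puDst + uSrc + uCarryIn;
    /* Carry out iff the 64-bit addition wrapped around. */
    bool const     fCarry   = uResult < *puDst || (uCarryIn && uResult == *puDst);
    *puDst = uResult;
    *pfEFlags &= ~(uint32_t)X86_EFL_CF;
    if (fCarry)
        *pfEFlags |= X86_EFL_CF;
}
#endif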
1545
1546/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
1547/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
1548/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
1549/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
1550
1551/* Opcode 0x0f 0x38 0xf8 - invalid. */
1552/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
1553/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
1554/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
1555
1556/* Opcode 0x0f 0x38 0xf9 - invalid. */
1557/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
1558/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
1559/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
1560
1561/* Opcode 0x0f 0x38 0xfa - invalid. */
1562/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
1563/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
1564/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
1565
1566/* Opcode 0x0f 0x38 0xfb - invalid. */
1567/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
1568/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
1569/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
1570
1571/* Opcode 0x0f 0x38 0xfc - invalid. */
1572/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
1573/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
1574/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
1575
1576/* Opcode 0x0f 0x38 0xfd - invalid. */
1577/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
1578/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
1579/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
1580
1581/* Opcode 0x0f 0x38 0xfe - invalid. */
1582/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
1583/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
1584/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
1585
1586/* Opcode 0x0f 0x38 0xff - invalid. */
1587/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
1588/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
1589/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
1590
1591
1592/**
1593 * Three byte opcode map, first two bytes are 0x0f 0x38.
1594 * @sa g_apfnVexMap2
1595 */
1596IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
1597{
 1598 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
1599 /* 0x00 */ iemOp_pshufb_Pq_Qq, iemOp_pshufb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1600 /* 0x01 */ iemOp_phaddw_Pq_Qq, iemOp_phaddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1601 /* 0x02 */ iemOp_phaddd_Pq_Qq, iemOp_phaddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1602 /* 0x03 */ iemOp_phaddsw_Pq_Qq, iemOp_phaddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1603 /* 0x04 */ iemOp_pmaddubsw_Pq_Qq, iemOp_pmaddubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1604 /* 0x05 */ iemOp_phsubw_Pq_Qq, iemOp_phsubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1605 /* 0x06 */ iemOp_phsubd_Pq_Qq, iemOp_phsubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1606 /* 0x07 */ iemOp_phsubsw_Pq_Qq, iemOp_phsubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1607 /* 0x08 */ iemOp_psignb_Pq_Qq, iemOp_psignb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1608 /* 0x09 */ iemOp_psignw_Pq_Qq, iemOp_psignw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1609 /* 0x0a */ iemOp_psignd_Pq_Qq, iemOp_psignd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1610 /* 0x0b */ iemOp_pmulhrsw_Pq_Qq, iemOp_pmulhrsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1611 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
1612 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
1613 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
1614 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
1615
1616 /* 0x10 */ iemOp_InvalidNeedRM, iemOp_pblendvb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1617 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
1618 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
1619 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
1620 /* 0x14 */ iemOp_InvalidNeedRM, iemOp_blendvps_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1621 /* 0x15 */ iemOp_InvalidNeedRM, iemOp_blendvpd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1622 /* 0x16 */ IEMOP_X4(iemOp_InvalidNeedRM),
1623 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_ptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1624 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
1625 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
1626 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
1627 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
1628 /* 0x1c */ iemOp_pabsb_Pq_Qq, iemOp_pabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1629 /* 0x1d */ iemOp_pabsw_Pq_Qq, iemOp_pabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1630 /* 0x1e */ iemOp_pabsd_Pq_Qq, iemOp_pabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1631 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
1632
1633 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_pmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1634 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_pmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1635 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_pmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1636 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_pmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1637 /* 0x24 */ iemOp_InvalidNeedRM, iemOp_pmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1638 /* 0x25 */ iemOp_InvalidNeedRM, iemOp_pmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1639 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
1640 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
1641 /* 0x28 */ iemOp_InvalidNeedRM, iemOp_pmuldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1642 /* 0x29 */ iemOp_InvalidNeedRM, iemOp_pcmpeqq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1643 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_movntdqa_Vdq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1644 /* 0x2b */ iemOp_InvalidNeedRM, iemOp_packusdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1645 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRM),
1646 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRM),
1647 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRM),
1648 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRM),
1649
1650 /* 0x30 */ iemOp_InvalidNeedRM, iemOp_pmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1651 /* 0x31 */ iemOp_InvalidNeedRM, iemOp_pmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1652 /* 0x32 */ iemOp_InvalidNeedRM, iemOp_pmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1653 /* 0x33 */ iemOp_InvalidNeedRM, iemOp_pmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1654 /* 0x34 */ iemOp_InvalidNeedRM, iemOp_pmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1655 /* 0x35 */ iemOp_InvalidNeedRM, iemOp_pmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1656 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
1657 /* 0x37 */ iemOp_InvalidNeedRM, iemOp_pcmpgtq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1658 /* 0x38 */ iemOp_InvalidNeedRM, iemOp_pminsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1659 /* 0x39 */ iemOp_InvalidNeedRM, iemOp_pminsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1660 /* 0x3a */ iemOp_InvalidNeedRM, iemOp_pminuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1661 /* 0x3b */ iemOp_InvalidNeedRM, iemOp_pminud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1662 /* 0x3c */ iemOp_InvalidNeedRM, iemOp_pmaxsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1663 /* 0x3d */ iemOp_InvalidNeedRM, iemOp_pmaxsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1664 /* 0x3e */ iemOp_InvalidNeedRM, iemOp_pmaxuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1665 /* 0x3f */ iemOp_InvalidNeedRM, iemOp_pmaxud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1666
1667 /* 0x40 */ iemOp_InvalidNeedRM, iemOp_pmulld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1668 /* 0x41 */ iemOp_InvalidNeedRM, iemOp_phminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1669 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
1670 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
1671 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
1672 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
1673 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
1674 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
1675 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
1676 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
1677 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
1678 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
1679 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
1680 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
1681 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
1682 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
1683
1684 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
1685 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
1686 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
1687 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
1688 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
1689 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
1690 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
1691 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
1692 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRM),
1693 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRM),
1694 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRM),
1695 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
1696 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
1697 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
1698 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
1699 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),
1700
1701 /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
1702 /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
1703 /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
1704 /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
1705 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
1706 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
1707 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
1708 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
1709 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
1710 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
1711 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
1712 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
1713 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
1714 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
1715 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
1716 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),
1717
1718 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
1719 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
1720 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
1721 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
1722 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
1723 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
1724 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
1725 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
1726 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
1727 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
1728 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
1729 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
1730 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
1731 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
1732 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
1733 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),
1734
1735 /* 0x80 */ iemOp_InvalidNeedRM, iemOp_invept_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1736 /* 0x81 */ iemOp_InvalidNeedRM, iemOp_invvpid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1737 /* 0x82 */ iemOp_InvalidNeedRM, iemOp_invpcid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1738 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
1739 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
1740 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
1741 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
1742 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
1743 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
1744 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
1745 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
1746 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
1747 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
1748 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
1749 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
1750 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
1751
1752 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
1753 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
1754 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
1755 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
1756 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
1757 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
1758 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
1759 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
1760 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
1761 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
1762 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
1763 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
1764 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
1765 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
1766 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
1767 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
1768
1769 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1770 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1771 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1772 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1773 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1774 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1775 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1776 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1777 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1778 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1779 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
1780 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
1781 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
1782 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
1783 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRM),
1784 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
1785
1786 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1787 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1788 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1789 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1790 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1791 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1792 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1793 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1794 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1795 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1796 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
1797 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
1798 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
1799 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
1800 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
1801 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
1802
1803 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1804 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1805 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1806 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1807 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1808 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1809 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1810 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1811 /* 0xc8 */ iemOp_sha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1812 /* 0xc9 */ iemOp_sha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1813 /* 0xca */ iemOp_sha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1814 /* 0xcb */ iemOp_sha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1815 /* 0xcc */ iemOp_sha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1816 /* 0xcd */ iemOp_sha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1817 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
1818 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
1819
1820 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1821 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1822 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1823 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1824 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1825 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1826 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1827 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1828 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1829 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1830 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
1831 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_aesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1832 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_aesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1833 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_aesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1834 /* 0xde */ iemOp_InvalidNeedRM, iemOp_aesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1835 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_aesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1836
1837 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1838 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1839 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1840 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1841 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1842 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1843 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1844 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1845 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1846 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1847 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
1848 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
1849 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
1850 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
1851 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
1852 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),
1853
1854 /* 0xf0 */ iemOp_movbe_Gy_My, iemOp_movbe_Gw_Mw, iemOp_InvalidNeedRM, iemOp_crc32_Gd_Eb,
1855 /* 0xf1 */ iemOp_movbe_My_Gy, iemOp_movbe_Mw_Gw, iemOp_InvalidNeedRM, iemOp_crc32_Gv_Ev,
1856 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1857 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1858 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1859 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1860 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_adcx_Gy_Ey, iemOp_adox_Gy_Ey, iemOp_InvalidNeedRM,
1861 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1862 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1863 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1864 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
1865 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
1866 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
1867 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
1868 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
1869 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
1870};
1871AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024);
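/*
 * For reference, the 0x0f 0x38 escape handler in IEMAllInstructionsTwoByte0f.cpp.h
 * dispatches into the table above using four entries per opcode, ordered by SIMD
 * prefix: none, 0x66, 0xf3, 0xf2.  The fragment below is a hedged sketch of that
 * lookup; the function name is hypothetical and the prefix index is assumed to be
 * tracked in the decoder state (shown here as pVCpu->iem.s.idxPrefix).
 */
#if 0 /* illustrative sketch only, not built */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38_Sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four columns per opcode: index = opcode * 4 + prefix column (0=none, 1=66h, 2=f3h, 3=f2h). */
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif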
1872
1873/** @} */
1874