VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstThree0f3a.cpp.h

Last change on this file was 104368, checked in by vboxsync, 4 weeks ago

VMM/IEM: bugref:9898 Fix insertps emulation to apply the clear mask after (rather than before) copying the source value to the destination.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 79.3 KB
Line 
1/* $Id: IEMAllInstThree0f3a.cpp.h 104368 2024-04-19 06:54:56Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks IEMAllInstVexMap3.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Three byte opcodes with first two bytes 0x0f 0x3a
33 * @{
34 */
35
/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @param   pfnU128     Emulation worker taking a destination reference, a
 *                      const source reference and the imm8 byte.
 * @sa iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* In the register form the imm8 immediately follows the ModR/M byte. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the displacement bytes, so it is fetched after the
           effective address calculation; the trailing '1' presumably tells
           the address calculation that one immediate byte is still pending
           (matters for RIP-relative addressing) — same pattern throughout
           this file. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
93
94
/**
 * Common worker for SSE 4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * No SIMD exceptions. SSE 4.1 cpuid checks.
 *
 * @param   pfnU128     Emulation worker taking a destination reference, a
 *                      const source reference and the imm8 byte.
 * @sa iemOpCommonSsse3_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8
         */
        /* imm8 immediately follows the ModR/M byte in the register form. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement: calc the effective address first
           (announcing 1 pending immediate byte), then fetch the imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
152
153
/**
 * Common worker for SSE 4.1 instructions of the form:
 *      xxx     xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * MXCSR is used as input and output.
 * Exceptions type 4. SSE 4.1 cpuid checks.
 *
 * @param   pfnU128     MXCSR-aware emulation worker taking a destination
 *                      XMM reference, a const dual-XMM source reference and
 *                      the imm8 byte.
 * @sa iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Fp_FullFullImm8_To_Full, PFNIEMAIMPLMXCSRF2XMMIMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Both the destination and R/M registers are fetched as input; the
           result is computed into the local Dst and only committed after the
           SIMD FP exception check below. */
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement (see the other workers in this file). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
219
220
/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * @param   pfnU128     Emulation worker taking a destination reference, a
 *                      const source reference and the imm8 byte.
 * @sa iemOpCommonSsse3_FullFullImm8_To_Full
 * @sa iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* imm8 immediately follows the ModR/M byte in this form. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement: effective address first, then imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
279
280
281/** Opcode 0x66 0x0f 0x00 - invalid (vex only). */
282/** Opcode 0x66 0x0f 0x01 - invalid (vex only). */
283/** Opcode 0x66 0x0f 0x02 - invalid (vex only). */
284/* Opcode 0x66 0x0f 0x03 - invalid */
285/** Opcode 0x66 0x0f 0x04 - invalid (vex only). */
286/** Opcode 0x66 0x0f 0x05 - invalid (vex only). */
287/* Opcode 0x66 0x0f 0x06 - invalid (vex only) */
288/* Opcode 0x66 0x0f 0x07 - invalid */
/** Opcode 0x66 0x0f 0x08.
 * ROUNDPS - round packed single-precision FP values according to imm8;
 * dispatches to the MXCSR-aware SSE 4.1 FP worker. */
FNIEMOP_DEF(iemOp_roundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, ROUNDPS, roundps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_roundps_u128, iemAImpl_roundps_u128_fallback));
}
296
297
/** Opcode 0x66 0x0f 0x09.
 * ROUNDPD - round packed double-precision FP values according to imm8;
 * dispatches to the MXCSR-aware SSE 4.1 FP worker. */
FNIEMOP_DEF(iemOp_roundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, ROUNDPD, roundpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_roundpd_u128, iemAImpl_roundpd_u128_fallback));
}
305
306
/** Opcode 0x66 0x0f 0x0a.
 * ROUNDSS - round the low single-precision FP value according to imm8;
 * the upper three dwords of the destination are left unchanged (only the
 * low dword of the result is stored). */
FNIEMOP_DEF(iemOp_roundss_Vss_Wss_Ib)
{
    /* The instruction form is very similar to CMPSS. */
    IEMOP_MNEMONIC3(RMI, ROUNDSS, roundss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_roundss_u128, pDst, pSrc, bImmArg);
        /* Check for unmasked SIMD FP exceptions before committing the result. */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM32, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_roundss_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
366
/** Opcode 0x66 0x0f 0x0b.
 * ROUNDSD - round the low double-precision FP value according to imm8;
 * only the low qword of the result is stored, the high qword of the
 * destination is left unchanged. */
FNIEMOP_DEF(iemOp_roundsd_Vsd_Wsd_Ib)
{
    /* The instruction form is very similar to CMPSD. */
    IEMOP_MNEMONIC3(RMI, ROUNDSD, roundsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_roundsd_u128, pDst, pSrc, bImmArg);
        /* Check for unmasked SIMD FP exceptions before committing the result. */
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_roundsd_u128, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
426
427
/** Opcode 0x66 0x0f 0x0c.
 * BLENDPS - select dwords from source/destination per imm8 bits;
 * integer-style worker, no MXCSR involvement. */
FNIEMOP_DEF(iemOp_blendps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, BLENDPS, blendps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback));
}
435
436
/** Opcode 0x66 0x0f 0x0d.
 * BLENDPD - select qwords from source/destination per imm8 bits;
 * integer-style worker, no MXCSR involvement. */
FNIEMOP_DEF(iemOp_blendpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, BLENDPD, blendpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback));
}
444
445
/** Opcode 0x66 0x0f 0x0e.
 * PBLENDW - select words from source/destination per imm8 bits. */
FNIEMOP_DEF(iemOp_pblendw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PBLENDW, pblendw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback));
}
453
454
/** Opcode 0x0f 0x0f.
 * PALIGNR (MMX form) - concatenate destination and source qwords and
 * extract a byte-aligned 64-bit result per imm8. Switches the FPU into
 * MMX mode. */
FNIEMOP_DEF(iemOp_palignr_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PALIGNR, palignr, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        /* MMX register numbers only use the low 3 ModR/M bits (no REX). */
        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u64, iemAImpl_palignr_u64_fallback),
                                 pDst, uSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u64, iemAImpl_palignr_u64_fallback),
                                 pDst, uSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
512
513
/** Opcode 0x66 0x0f 0x0f.
 * PALIGNR (SSE form) - byte-aligned extraction from the concatenated
 * 256-bit value of destination:source, per imm8. */
FNIEMOP_DEF(iemOp_palignr_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PALIGNR, palignr, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback));
}
521
522
523/* Opcode 0x66 0x0f 0x10 - invalid */
524/* Opcode 0x66 0x0f 0x11 - invalid */
525/* Opcode 0x66 0x0f 0x12 - invalid */
526/* Opcode 0x66 0x0f 0x13 - invalid */
527
528
/** Opcode 0x66 0x0f 0x14.
 * PEXTRB - extract the byte selected by imm8[3:0] from the XMM register and
 * store it zero-extended into a 32-bit GPR or as a byte to memory. */
FNIEMOP_DEF(iemOp_pextrb_RdMb_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC3(MRI, PEXTRB, pextrb, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Byte index is imm8 modulo 16. */
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        /* 32-bit GPR store zero-extends to 64 bits in long mode. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
571
572
/** Opcode 0x66 0x0f 0x15.
 * PEXTRW (SSE 4.1 form) - extract the word selected by imm8[2:0] from the
 * XMM register and store it zero-extended into a 32-bit GPR or as a word
 * to memory. */
FNIEMOP_DEF(iemOp_pextrw_RdMw_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC3(MRI, PEXTRW, pextrw, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Word index is imm8 modulo 8. */
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
615
616
/**
 * Opcode 0x66 0x0f 0x16 - PEXTRD/PEXTRQ, selected by REX.W: extract a dword
 * (imm8 & 3) or qword (imm8 & 1) from the XMM register to a GPR or memory.
 */
FNIEMOP_DEF(iemOp_pextrd_q_RdMw_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x16
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(MRI, PEXTRQ, pextrq, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            /* Qword index is imm8 modulo 2. */
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM.
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* imm8 trails the displacement, fetched after the address calc. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(MRI, PEXTRD, pextrd, Ey, Vd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            /* Dword index is imm8 modulo 4. */
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* imm8 trails the displacement, fetched after the address calc. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
713
714
/** Opcode 0x66 0x0f 0x17.
 * EXTRACTPS - copy the single-precision dword selected by imm8[1:0] from
 * the XMM register to a 32-bit GPR or memory (raw bit copy, no FP
 * conversion involved). */
FNIEMOP_DEF(iemOp_extractps_Ed_Vdq_Ib)
{
    IEMOP_MNEMONIC3(MRI, EXTRACTPS, extractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Dword index is imm8 modulo 4. */
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
756
757
758/* Opcode 0x66 0x0f 0x18 - invalid (vex only). */
759/* Opcode 0x66 0x0f 0x19 - invalid (vex only). */
760/* Opcode 0x66 0x0f 0x1a - invalid */
761/* Opcode 0x66 0x0f 0x1b - invalid */
762/* Opcode 0x66 0x0f 0x1c - invalid */
763/* Opcode 0x66 0x0f 0x1d - invalid (vex only). */
764/* Opcode 0x66 0x0f 0x1e - invalid */
765/* Opcode 0x66 0x0f 0x1f - invalid */
766
767
/** Opcode 0x66 0x0f 0x20.
 * PINSRB - insert a byte from a GPR (low byte) or memory into the XMM
 * register at the position selected by imm8[3:0]; other bytes unchanged. */
FNIEMOP_DEF(iemOp_pinsrb_Vdq_RyMb_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRB, pinsrb, Vd, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, greg32.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint8_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Byte index is imm8 modulo 16. */
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem8].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint8_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
810
/** Opcode 0x66 0x0f 0x21.
 * INSERTPS - copy one source dword into the destination and then zero the
 * dwords selected by the imm8 mask:
 *  - imm8[7:6]: source dword index (register form only; memory form always
 *    reads one dword from memory),
 *  - imm8[5:4]: destination dword index,
 *  - imm8[3:0]: zero mask, applied AFTER the copy (see r104368 fix). */
FNIEMOP_DEF(iemOp_insertps_Vdq_UdqMd_Ib)
{
    IEMOP_MNEMONIC3(RMI, INSERTPS, insertps, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc);
        /* The clear mask (imm8[3:0]) must be applied after storing the
           source dword, so an insert into a masked-out position still ends
           up zeroed. */
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        /* imm8 trails the displacement, hence fetched after the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory form: imm8[7:6] is not used as a source index; the single
           dword read from memory is the source. */
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
857
/**
 * Opcode 0x66 0x0f 0x22 - PINSRD/PINSRQ, selected by REX.W: insert a dword
 * (index imm8 & 3) or qword (index imm8 & 1) from a GPR or memory into the
 * XMM register; the other elements are left unchanged.
 */
FNIEMOP_DEF(iemOp_pinsrd_q_Vdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x22
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(RMI, PINSRQ, pinsrq, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * XMM, greg64.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            /* Qword index is imm8 modulo 2. */
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * XMM, [mem64].
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* imm8 trails the displacement, fetched after the address calc. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x22
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(RMI, PINSRD, pinsrd, Vd, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * XMM, greg32.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            /* Dword index is imm8 modulo 4. */
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * XMM, [mem32].
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* imm8 trails the displacement, fetched after the address calc. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
955
956
957/* Opcode 0x66 0x0f 0x23 - invalid */
958/* Opcode 0x66 0x0f 0x24 - invalid */
959/* Opcode 0x66 0x0f 0x25 - invalid */
960/* Opcode 0x66 0x0f 0x26 - invalid */
961/* Opcode 0x66 0x0f 0x27 - invalid */
962/* Opcode 0x66 0x0f 0x28 - invalid */
963/* Opcode 0x66 0x0f 0x29 - invalid */
964/* Opcode 0x66 0x0f 0x2a - invalid */
965/* Opcode 0x66 0x0f 0x2b - invalid */
966/* Opcode 0x66 0x0f 0x2c - invalid */
967/* Opcode 0x66 0x0f 0x2d - invalid */
968/* Opcode 0x66 0x0f 0x2e - invalid */
969/* Opcode 0x66 0x0f 0x2f - invalid */
970
971
972/* Opcode 0x66 0x0f 0x30 - invalid */
973/* Opcode 0x66 0x0f 0x31 - invalid */
974/* Opcode 0x66 0x0f 0x32 - invalid */
975/* Opcode 0x66 0x0f 0x33 - invalid */
976/* Opcode 0x66 0x0f 0x34 - invalid */
977/* Opcode 0x66 0x0f 0x35 - invalid */
978/* Opcode 0x66 0x0f 0x36 - invalid */
979/* Opcode 0x66 0x0f 0x37 - invalid */
980/* Opcode 0x66 0x0f 0x38 - invalid (vex only). */
981/* Opcode 0x66 0x0f 0x39 - invalid (vex only). */
982/* Opcode 0x66 0x0f 0x3a - invalid */
983/* Opcode 0x66 0x0f 0x3b - invalid */
984/* Opcode 0x66 0x0f 0x3c - invalid */
985/* Opcode 0x66 0x0f 0x3d - invalid */
986/* Opcode 0x66 0x0f 0x3e - invalid */
987/* Opcode 0x66 0x0f 0x3f - invalid */
988
989
/** Opcode 0x66 0x0f 0x40. */
FNIEMOP_DEF(iemOp_dpps_Vx_Wx_Ib)
{
    /* DPPS xmm1, xmm2/m128, imm8 (SSE4.1).  All decoding, operand fetching
       and exception checks are handled by the common SSE4.1 FP worker; the
       imm8 is passed through to the selected implementation.  The host
       assembly version is used when the host has SSE4.1, otherwise the C
       fallback (see IEM_SELECT_HOST_OR_FALLBACK). */
    IEMOP_MNEMONIC3(RMI, DPPS, dpps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_dpps_u128, iemAImpl_dpps_u128_fallback));
}
997
998
/** Opcode 0x66 0x0f 0x41, */
FNIEMOP_DEF(iemOp_dppd_Vdq_Wdq_Ib)
{
    /* DPPD xmm1, xmm2/m128, imm8 (SSE4.1).  Same structure as DPPS above:
       the common SSE4.1 FP worker does all the decoding and operand work,
       only the implementation function pair differs. */
    IEMOP_MNEMONIC3(RMI, DPPD, dppd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_dppd_u128, iemAImpl_dppd_u128_fallback));
}
1006
1007
/** Opcode 0x66 0x0f 0x42. */
FNIEMOP_DEF(iemOp_mpsadbw_Vx_Wx_Ib)
{
    /* MPSADBW xmm1, xmm2/m128, imm8 (SSE4.1).  Integer instruction, so it
       goes through the plain SSE4.1 worker rather than the FP one used by
       DPPS/DPPD. */
    IEMOP_MNEMONIC3(RMI, MPSADBW, mpsadbw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback));
}
1015
1016
1017/* Opcode 0x66 0x0f 0x43 - invalid */
1018
1019
/** Opcode 0x66 0x0f 0x44. */
FNIEMOP_DEF(iemOp_pclmulqdq_Vdq_Wdq_Ib)
{
    /* PCLMULQDQ xmm1, xmm2/m128, imm8 - carry-less multiplication of the
       quadwords selected by the imm8.  Gated on the PCLMUL CPUID feature
       (see the fPclMul decoding check below). */
    IEMOP_MNEMONIC3(RMI, PCLMULQDQ, pclmulqdq, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fPclMul);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fPclMul,
                                                             iemAImpl_pclmulqdq_u128,
                                                             iemAImpl_pclmulqdq_u128_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Note! The effective address must be decoded before fetching the
                 imm8, since the displacement bytes precede the immediate in
                 the instruction stream; the '1' tells the address decoder
                 that one immediate byte follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fPclMul);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch (SSE semantics). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fPclMul,
                                                             iemAImpl_pclmulqdq_u128,
                                                             iemAImpl_pclmulqdq_u128_fallback),
                                 puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1077
1078
1079/* Opcode 0x66 0x0f 0x45 - invalid */
1080/* Opcode 0x66 0x0f 0x46 - invalid (vex only) */
1081/* Opcode 0x66 0x0f 0x47 - invalid */
1082/* Opcode 0x66 0x0f 0x48 - invalid */
1083/* Opcode 0x66 0x0f 0x49 - invalid */
1084/* Opcode 0x66 0x0f 0x4a - invalid (vex only). */
1085/* Opcode 0x66 0x0f 0x4b - invalid (vex only). */
1086/* Opcode 0x66 0x0f 0x4c - invalid (vex only). */
1087/* Opcode 0x66 0x0f 0x4d - invalid */
1088/* Opcode 0x66 0x0f 0x4e - invalid */
1089/* Opcode 0x66 0x0f 0x4f - invalid */
1090
1091
1092/* Opcode 0x66 0x0f 0x50 - invalid */
1093/* Opcode 0x66 0x0f 0x51 - invalid */
1094/* Opcode 0x66 0x0f 0x52 - invalid */
1095/* Opcode 0x66 0x0f 0x53 - invalid */
1096/* Opcode 0x66 0x0f 0x54 - invalid */
1097/* Opcode 0x66 0x0f 0x55 - invalid */
1098/* Opcode 0x66 0x0f 0x56 - invalid */
1099/* Opcode 0x66 0x0f 0x57 - invalid */
1100/* Opcode 0x66 0x0f 0x58 - invalid */
1101/* Opcode 0x66 0x0f 0x59 - invalid */
1102/* Opcode 0x66 0x0f 0x5a - invalid */
1103/* Opcode 0x66 0x0f 0x5b - invalid */
1104/* Opcode 0x66 0x0f 0x5c - invalid */
1105/* Opcode 0x66 0x0f 0x5d - invalid */
1106/* Opcode 0x66 0x0f 0x5e - invalid */
1107/* Opcode 0x66 0x0f 0x5f - invalid */
1108
1109
1110/**
1111 * @opcode 0x60
1112 * @oppfx 0x66
1113 * @opflmodify cf,pf,af,zf,sf,of
1114 * @opflclear pf,af
1115 */
1116FNIEMOP_DEF(iemOp_pcmpestrm_Vdq_Wdq_Ib)
1117{
1118 IEMOP_MNEMONIC3(RMI, PCMPESTRM, pcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
1119
1120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1121 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1122 {
1123 if (IEM_IS_MODRM_REG_MODE(bRm))
1124 {
1125 /*
1126 * Register, register.
1127 */
1128 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1129 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
1131 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1132 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1133 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1134 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1135 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1137 IEM_MC_PREPARE_SSE_USAGE();
1138 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
1139 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1140 IEM_MC_REF_EFLAGS(pEFlags);
1141 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
1142 iemAImpl_pcmpestrm_u128,
1143 iemAImpl_pcmpestrm_u128_fallback),
1144 puDst, pEFlags, pSrc, bImmArg);
1145 IEM_MC_ADVANCE_RIP_AND_FINISH();
1146 IEM_MC_END();
1147 }
1148 else
1149 {
1150 /*
1151 * Register, memory.
1152 */
1153 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1154 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1155 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1156 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1157 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1159
1160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1161 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1162 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
1164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1165 IEM_MC_PREPARE_SSE_USAGE();
1166
1167 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
1168 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1169 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1170 IEM_MC_REF_EFLAGS(pEFlags);
1171 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
1172 iemAImpl_pcmpestrm_u128,
1173 iemAImpl_pcmpestrm_u128_fallback),
1174 puDst, pEFlags, pSrc, bImmArg);
1175 IEM_MC_ADVANCE_RIP_AND_FINISH();
1176 IEM_MC_END();
1177 }
1178 }
1179 else
1180 {
1181 if (IEM_IS_MODRM_REG_MODE(bRm))
1182 {
1183 /*
1184 * Register, register.
1185 */
1186 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1187 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
1189 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1190 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1191 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1192 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1193 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1195 IEM_MC_PREPARE_SSE_USAGE();
1196 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
1197 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1198 IEM_MC_REF_EFLAGS(pEFlags);
1199 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
1200 iemAImpl_pcmpestrm_u128,
1201 iemAImpl_pcmpestrm_u128_fallback),
1202 puDst, pEFlags, pSrc, bImmArg);
1203 IEM_MC_ADVANCE_RIP_AND_FINISH();
1204 IEM_MC_END();
1205 }
1206 else
1207 {
1208 /*
1209 * Register, memory.
1210 */
1211 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1212 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1213 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1214 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1215 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1217
1218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1219 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1220 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
1222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1223 IEM_MC_PREPARE_SSE_USAGE();
1224
1225 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
1226 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1227 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1228 IEM_MC_REF_EFLAGS(pEFlags);
1229 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
1230 iemAImpl_pcmpestrm_u128,
1231 iemAImpl_pcmpestrm_u128_fallback),
1232 puDst, pEFlags, pSrc, bImmArg);
1233 IEM_MC_ADVANCE_RIP_AND_FINISH();
1234 IEM_MC_END();
1235 }
1236 }
1237}
1238
1239
1240/**
1241 * @opcode 0x61
1242 * @oppfx 0x66
1243 * @opflmodify cf,pf,af,zf,sf,of
1244 * @opflclear pf,af
1245 */
FNIEMOP_DEF(iemOp_pcmpestri_Vdq_Wdq_Ib)
{
    /* PCMPESTRI xmm1, xmm2/m128, imm8 (SSE4.2) - explicit-length string
       compare producing an index in ECX (high half of RCX cleared; see the
       testcase @todo notes).  Same four-way path split as PCMPESTRM above:
       REX.W (RAX/RDX vs sign-extended EAX/EDX lengths) x reg/mem source. */
    IEMOP_MNEMONIC3(RMI, PCMPESTRI, pcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            /* Result index is written through pu32Ecx; clear RCX[63:32] up
               front since the implementation only writes the low dword. */
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* Effective address before the imm8 (instruction stream order). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            /* No REX.W: lengths come from EAX/EDX sign-extended to 64 bits. */
            IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                       pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1376
1377
1378/**
1379 * @opcode 0x62
1380 * @oppfx 0x66
1381 * @opflmodify cf,pf,af,zf,sf,of
1382 * @opflclear pf,af
1383 */
FNIEMOP_DEF(iemOp_pcmpistrm_Vdq_Wdq_Ib)
{
    /* PCMPISTRM xmm1, xmm2/m128, imm8 (SSE4.2) - implicit-length string
       compare producing a mask in XMM0.  Unlike the PCMPESTRx variants
       there are no length registers, so no REX.W split is needed - only
       register vs memory source. */
    IEMOP_MNEMONIC3(RMI, PCMPISTRM, pcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The mask result always goes to XMM0, not the ModR/M register. */
        IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistrm_u128,
                                                             iemAImpl_pcmpistrm_u128_fallback),
                                 puDst, pEFlags, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address before the imm8 (instruction stream order). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistrm_u128,
                                                             iemAImpl_pcmpistrm_u128_fallback),
                                 puDst, pEFlags, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1444
1445
1446/**
1447 * @opcode 0x63
1448 * @oppfx 0x66
1449 * @opflmodify cf,pf,af,zf,sf,of
1450 * @opflclear pf,af
1451 */
FNIEMOP_DEF(iemOp_pcmpistri_Vdq_Wdq_Ib)
{
    /* PCMPISTRI xmm1, xmm2/m128, imm8 (SSE4.2) - implicit-length string
       compare producing an index in ECX.  Unlike the other PCMPxSTRx
       decoders this one uses IEM_MC_CALL_AIMPL_4 and stores the returned
       u32Ecx value, rather than passing a register reference in. */
    IEMOP_MNEMONIC3(RMI, PCMPISTRI, pcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_ARG(uint32_t *, pEFlags, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
        IEM_MC_ARG(PCRTUINT128U, pSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
                            IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                        iemAImpl_pcmpistri_u128,
                                                        iemAImpl_pcmpistri_u128_fallback),
                            pEFlags, pSrc1, pSrc2, bImmArg);
        /** @todo testcase: High dword of RCX cleared? */
        IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
        IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
        IEM_MC_LOCAL(RTUINT128U, Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc2, Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address before the imm8 (instruction stream order). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Note! Plain (unaligned) fetch here, unlike the ALIGN_SSE fetches
                 used by e.g. PCLMULQDQ above. */
        IEM_MC_FETCH_MEM_U128(Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
                            IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                        iemAImpl_pcmpistri_u128,
                                                        iemAImpl_pcmpistri_u128_fallback),
                            pEFlags, pSrc1, pSrc2, bImmArg);
        /** @todo testcase: High dword of RCX cleared? */
        IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
        IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1520
1521
1522/* Opcode 0x66 0x0f 0x64 - invalid */
1523/* Opcode 0x66 0x0f 0x65 - invalid */
1524/* Opcode 0x66 0x0f 0x66 - invalid */
1525/* Opcode 0x66 0x0f 0x67 - invalid */
1526/* Opcode 0x66 0x0f 0x68 - invalid */
1527/* Opcode 0x66 0x0f 0x69 - invalid */
1528/* Opcode 0x66 0x0f 0x6a - invalid */
1529/* Opcode 0x66 0x0f 0x6b - invalid */
1530/* Opcode 0x66 0x0f 0x6c - invalid */
1531/* Opcode 0x66 0x0f 0x6d - invalid */
1532/* Opcode 0x66 0x0f 0x6e - invalid */
1533/* Opcode 0x66 0x0f 0x6f - invalid */
1534
1535/* Opcodes 0x0f 0x70 thru 0x0f 0xb0 are unused. */
1536
1537
1538/* Opcode 0x0f 0xc0 - invalid */
1539/* Opcode 0x0f 0xc1 - invalid */
1540/* Opcode 0x0f 0xc2 - invalid */
1541/* Opcode 0x0f 0xc3 - invalid */
1542/* Opcode 0x0f 0xc4 - invalid */
1543/* Opcode 0x0f 0xc5 - invalid */
1544/* Opcode 0x0f 0xc6 - invalid */
1545/* Opcode 0x0f 0xc7 - invalid */
1546/* Opcode 0x0f 0xc8 - invalid */
1547/* Opcode 0x0f 0xc9 - invalid */
1548/* Opcode 0x0f 0xca - invalid */
1549/* Opcode 0x0f 0xcb - invalid */
1550
1551
1552/* Opcode 0x0f 0xcc */
FNIEMOP_DEF(iemOp_sha1rnds4_Vdq_Wdq_Ib)
{
    /* SHA1RNDS4 xmm1, xmm2/m128, imm8 - four SHA-1 rounds; the imm8 selects
       the round function/constant.  Gated on the SHA CPUID feature (fSha). */
    IEMOP_MNEMONIC3(RMI, SHA1RNDS4, sha1rnds4, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha,
                                                             iemAImpl_sha1rnds4_u128,
                                                             iemAImpl_sha1rnds4_u128_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address before the imm8 (instruction stream order). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch (SSE semantics). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha,
                                                             iemAImpl_sha1rnds4_u128,
                                                             iemAImpl_sha1rnds4_u128_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1608
1609
1610/* Opcode 0x0f 0xcd - invalid */
1611/* Opcode 0x0f 0xce - invalid */
1612/* Opcode 0x0f 0xcf - invalid */
1613
1614
1615/* Opcode 0x66 0x0f 0xd0 - invalid */
1616/* Opcode 0x66 0x0f 0xd1 - invalid */
1617/* Opcode 0x66 0x0f 0xd2 - invalid */
1618/* Opcode 0x66 0x0f 0xd3 - invalid */
1619/* Opcode 0x66 0x0f 0xd4 - invalid */
1620/* Opcode 0x66 0x0f 0xd5 - invalid */
1621/* Opcode 0x66 0x0f 0xd6 - invalid */
1622/* Opcode 0x66 0x0f 0xd7 - invalid */
1623/* Opcode 0x66 0x0f 0xd8 - invalid */
1624/* Opcode 0x66 0x0f 0xd9 - invalid */
1625/* Opcode 0x66 0x0f 0xda - invalid */
1626/* Opcode 0x66 0x0f 0xdb - invalid */
1627/* Opcode 0x66 0x0f 0xdc - invalid */
1628/* Opcode 0x66 0x0f 0xdd - invalid */
1629/* Opcode 0x66 0x0f 0xde - invalid */
1630
1631
1632/* Opcode 0x66 0x0f 0xdf - (aeskeygenassist). */
FNIEMOP_DEF(iemOp_aeskeygen_Vdq_Wdq_Ib)
{
    /* AESKEYGENASSIST xmm1, xmm2/m128, imm8 (AES-NI).  All decoding and
       operand handling is done by the common AES-NI worker; only the
       implementation pair is supplied here. */
    IEMOP_MNEMONIC3(RMI, AESKEYGEN, aeskeygen, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback));
}
1639
1640
1641/* Opcode 0xf2 0x0f 0xf0 - invalid (vex only) */
1642
1643
1644/**
1645 * Three byte opcode map, first two bytes are 0x0f 0x3a.
1646 * @sa g_apfnVexMap2
1647 */
1648const PFNIEMOP g_apfnThreeByte0f3a[] =
1649{
1650 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1651 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1652 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1653 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1654 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1655 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1656 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1657 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1658 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1659 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_roundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1660 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_roundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1661 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_roundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1662 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_roundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1663 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_blendps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1664 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_blendpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1665 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_pblendw_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1666 /* 0x0f */ iemOp_palignr_Pq_Qq_Ib, iemOp_palignr_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1667
1668 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1669 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1670 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1671 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1672 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_pextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1673 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_pextrw_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1674 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_pextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1675 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_extractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1676 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1677 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1678 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1679 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1680 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1681 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1682 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1683 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1684
1685 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_pinsrb_Vdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1686 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_insertps_Vdq_UdqMd_Ib,iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1687 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_pinsrd_q_Vdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1688 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1689 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1690 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1691 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1692 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1693 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1694 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1695 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1696 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1697 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1698 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1699 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1700 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1701
1702 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1703 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1704 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1705 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1706 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1707 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1708 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1709 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1710 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1711 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1712 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1713 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1714 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1715 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1716 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1717 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1718
1719 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_dpps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1720 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_dppd_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1721 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_mpsadbw_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1722 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1723 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_pclmulqdq_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1724 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1725 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1726 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1727 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1728 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1729 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1730 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1731 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1732 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1733 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1734 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1735
1736 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1737 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1738 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1739 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1740 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1741 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1742 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1743 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1744 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1745 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1746 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1747 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1748 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1749 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1750 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1751 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1752
1753 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_pcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1754 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_pcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1755 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_pcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1756 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_pcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1757 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1758 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1759 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1760 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1761 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1762 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1763 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1764 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1765 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1766 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1767 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1768 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1769
1770 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1771 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1772 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1773 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1774 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1775 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1776 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1777 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1778 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1779 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1780 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1781 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1782 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1783 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1784 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1785 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1786
1787 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1788 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1789 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1790 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1791 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1792 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1793 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1794 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1795 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1796 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1797 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1798 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1799 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1800 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1801 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1802 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1803
1804 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1805 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1806 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1807 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1808 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1809 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1810 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1811 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1812 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1813 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1814 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1815 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1816 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1817 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1818 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1819 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1820
1821 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1822 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1823 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1824 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1825 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1826 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1827 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1828 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1829 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1830 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1831 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1832 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1833 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1834 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1835 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1836 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1837
1838 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1839 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1840 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1841 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1842 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1843 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1844 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1845 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1846 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1847 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1848 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1849 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1850 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1851 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1852 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1853 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1854
1855 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1856 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1857 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1858 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1859 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1860 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1861 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1862 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1863 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1864 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1865 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1866 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1867 /* 0xcc */ iemOp_sha1rnds4_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1868 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1869 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1870 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1871
1872 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1873 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1874 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1875 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1876 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1877 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1878 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1879 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1880 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1881 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1882 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1883 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1884 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1885 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1886 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1887 /* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_aeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1888
1889 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1890 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1891 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1892 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1893 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1894 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1895 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1896 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1897 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1898 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1899 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1900 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1901 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1902 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1903 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1904 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1905
1906 /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1907 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1908 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1909 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1910 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1911 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1912 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1913 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1914 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1915 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1916 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1917 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1918 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1919 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1920 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1921 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1922};
/* 256 opcodes x 4 dispatch columns per opcode (each IEMOP_X4 row above expands
   to four entries) = 1024 total; catches a miscounted/missing table row at
   compile time. */
1923AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f3a) == 1024);
1924
1925/** @} */
1926
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
ContactPrivacy policyTerms of Use