VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 97138

Last change on this file since 97138 was 97138, checked in by vboxsync, 19 months ago

IEM: Make unsupported (not recommended to be used) forms of MOVSXD visibly fail.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 395.6 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 97138 2022-10-13 15:20:45Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
61/**
62 * @opcode 0x00
63 * @opmnemonic add
64 * @op1 rm:Eb
65 * @op2 reg:Gb
66 * @opmaps one
67 * @openc ModR/M
68 * @opflmodify cf,pf,af,zf,sf,of
69 * @ophints harmless ignores_op_sizes
70 * @opstats add_Eb_Gb
71 * @opgroup og_gen_arith_bin
72 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
73 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
74 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
75 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
76 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* 0x00: ADD Eb,Gb — r/m8 is the destination; defers to the shared rm,r8 binary-op decoder with the 'add' implementation table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
82
83
84/**
85 * @opcode 0x01
86 * @opgroup og_gen_arith_bin
87 * @opflmodify cf,pf,af,zf,sf,of
88 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
89 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
90 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
91 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
92 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* 0x01: ADD Ev,Gv — operand-size-dependent form; LOCK permitted (memory destination forms). */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
98
99
100/**
101 * @opcode 0x02
102 * @opgroup og_gen_arith_bin
103 * @opflmodify cf,pf,af,zf,sf,of
104 * @opcopytests iemOp_add_Eb_Gb
105 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* 0x02: ADD Gb,Eb — register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
111
112
113/**
114 * @opcode 0x03
115 * @opgroup og_gen_arith_bin
116 * @opflmodify cf,pf,af,zf,sf,of
117 * @opcopytests iemOp_add_Ev_Gv
118 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* 0x03: ADD Gv,Ev — register destination. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
124
125
126/**
127 * @opcode 0x04
128 * @opgroup og_gen_arith_bin
129 * @opflmodify cf,pf,af,zf,sf,of
130 * @opcopytests iemOp_add_Eb_Gb
131 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* 0x04: ADD AL,Ib — fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
137
138
139/**
140 * @opcode 0x05
141 * @opgroup og_gen_arith_bin
142 * @opflmodify cf,pf,af,zf,sf,of
143 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
144 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
145 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
146 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
147 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* 0x05: ADD rAX,Iz — AX/EAX/RAX destination with operand-size immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
155/**
156 * @opcode 0x06
157 * @opgroup og_stack_sreg
158 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* 0x06: PUSH ES — invalid in 64-bit mode (IEMOP_HLP_NO_64BIT), common sreg-push helper does the rest. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
165
166
167/**
168 * @opcode 0x07
169 * @opgroup og_stack_sreg
170 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* 0x07: POP ES — invalid in 64-bit mode; deferred to the C implementation of pop-sreg. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
180/**
181 * @opcode 0x08
182 * @opgroup og_gen_arith_bin
183 * @opflmodify cf,pf,af,zf,sf,of
184 * @opflundef af
185 * @opflclear of,cf
186 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
187 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
188 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
189 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
190 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* 0x08: OR Eb,Gb — AF is architecturally undefined after OR, hence the verification mask. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
197
198
/**
200 * @opcode 0x09
201 * @opgroup og_gen_arith_bin
202 * @opflmodify cf,pf,af,zf,sf,of
203 * @opflundef af
204 * @opflclear of,cf
205 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
206 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
207 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
208 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
209 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
210 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
211 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* 0x09: OR Ev,Gv — AF undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
228FNIEMOP_DEF(iemOp_or_Gb_Eb)
229{
230 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
233}
234
235
236/**
237 * @opcode 0x0b
238 * @opgroup og_gen_arith_bin
239 * @opflmodify cf,pf,af,zf,sf,of
240 * @opflundef af
241 * @opflclear of,cf
242 * @opcopytests iemOp_or_Ev_Gv
243 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* 0x0b: OR Gv,Ev — register destination; AF undefined after OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
250
251
252/**
253 * @opcode 0x0c
254 * @opgroup og_gen_arith_bin
255 * @opflmodify cf,pf,af,zf,sf,of
256 * @opflundef af
257 * @opflclear of,cf
258 * @opcopytests iemOp_or_Eb_Gb
259 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* 0x0c: OR AL,Ib — AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
266
267
268/**
269 * @opcode 0x0d
270 * @opgroup og_gen_arith_bin
271 * @opflmodify cf,pf,af,zf,sf,of
272 * @opflundef af
273 * @opflclear of,cf
274 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
275 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
276 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
277 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
278 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
279 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
280 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
281 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* 0x0d: OR rAX,Iz — AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
290/**
291 * @opcode 0x0e
292 * @opgroup og_stack_sreg
293 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* 0x0e: PUSH CS — invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
344/**
345 * @opcode 0x10
346 * @opgroup og_gen_arith_bin
347 * @opfltest cf
348 * @opflmodify cf,pf,af,zf,sf,of
349 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
350 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
351 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
352 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
353 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
354 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* 0x10: ADC Eb,Gb — add with carry; CF is an input (see @opfltest above). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
360
361
362/**
363 * @opcode 0x11
364 * @opgroup og_gen_arith_bin
365 * @opfltest cf
366 * @opflmodify cf,pf,af,zf,sf,of
367 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
368 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
369 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
370 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
371 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
372 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* 0x11: ADC Ev,Gv. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
378
379
380/**
381 * @opcode 0x12
382 * @opgroup og_gen_arith_bin
383 * @opfltest cf
384 * @opflmodify cf,pf,af,zf,sf,of
385 * @opcopytests iemOp_adc_Eb_Gb
386 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* 0x12: ADC Gb,Eb — register destination, no LOCK hint. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
392
393
394/**
395 * @opcode 0x13
396 * @opgroup og_gen_arith_bin
397 * @opfltest cf
398 * @opflmodify cf,pf,af,zf,sf,of
399 * @opcopytests iemOp_adc_Ev_Gv
400 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* 0x13: ADC Gv,Ev. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
406
407
408/**
409 * @opcode 0x14
410 * @opgroup og_gen_arith_bin
411 * @opfltest cf
412 * @opflmodify cf,pf,af,zf,sf,of
413 * @opcopytests iemOp_adc_Eb_Gb
414 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* 0x14: ADC AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
420
421
422/**
423 * @opcode 0x15
424 * @opgroup og_gen_arith_bin
425 * @opfltest cf
426 * @opflmodify cf,pf,af,zf,sf,of
427 * @opcopytests iemOp_adc_Ev_Gv
428 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* 0x15: ADC rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
436/**
437 * @opcode 0x16
438 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* 0x16: PUSH SS — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
445
446
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* 0x17: POP SS — invalid in 64-bit mode; inhibits interrupts for the following instruction (DISOPTYPE_INHIBIT_IRQS). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
462/**
463 * @opcode 0x18
464 * @opgroup og_gen_arith_bin
465 * @opfltest cf
466 * @opflmodify cf,pf,af,zf,sf,of
467 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* 0x18: SBB Eb,Gb — subtract with borrow; CF is an input. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
473
474
475/**
476 * @opcode 0x19
477 * @opgroup og_gen_arith_bin
478 * @opfltest cf
479 * @opflmodify cf,pf,af,zf,sf,of
480 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* 0x19: SBB Ev,Gv. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
486
487
488/**
489 * @opcode 0x1a
490 * @opgroup og_gen_arith_bin
491 * @opfltest cf
492 * @opflmodify cf,pf,af,zf,sf,of
493 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* 0x1a: SBB Gb,Eb — register destination, no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
499
500
501/**
502 * @opcode 0x1b
503 * @opgroup og_gen_arith_bin
504 * @opfltest cf
505 * @opflmodify cf,pf,af,zf,sf,of
506 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* 0x1b: SBB Gv,Ev. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
512
513
514/**
515 * @opcode 0x1c
516 * @opgroup og_gen_arith_bin
517 * @opfltest cf
518 * @opflmodify cf,pf,af,zf,sf,of
519 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* 0x1c: SBB AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
525
526
527/**
528 * @opcode 0x1d
529 * @opgroup og_gen_arith_bin
530 * @opfltest cf
531 * @opflmodify cf,pf,af,zf,sf,of
532 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* 0x1d: SBB rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
540/**
541 * @opcode 0x1e
542 * @opgroup og_stack_sreg
543 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* 0x1e: PUSH DS — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
550
551
552/**
553 * @opcode 0x1f
554 * @opgroup og_stack_sreg
555 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* 0x1f: POP DS — invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
565/**
566 * @opcode 0x20
567 * @opgroup og_gen_arith_bin
568 * @opflmodify cf,pf,af,zf,sf,of
569 * @opflundef af
570 * @opflclear of,cf
571 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* 0x20: AND Eb,Gb — AF undefined after AND, hence the verification mask. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
578
579
580/**
581 * @opcode 0x21
582 * @opgroup og_gen_arith_bin
583 * @opflmodify cf,pf,af,zf,sf,of
584 * @opflundef af
585 * @opflclear of,cf
586 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* 0x21: AND Ev,Gv. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
593
594
595/**
596 * @opcode 0x22
597 * @opgroup og_gen_arith_bin
598 * @opflmodify cf,pf,af,zf,sf,of
599 * @opflundef af
600 * @opflclear of,cf
601 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* 0x22: AND Gb,Eb — register destination, no LOCK hint. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
608
609
610/**
611 * @opcode 0x23
612 * @opgroup og_gen_arith_bin
613 * @opflmodify cf,pf,af,zf,sf,of
614 * @opflundef af
615 * @opflclear of,cf
616 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* 0x23: AND Gv,Ev. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
640/**
641 * @opcode 0x25
642 * @opgroup og_gen_arith_bin
643 * @opflmodify cf,pf,af,zf,sf,of
644 * @opflundef af
645 * @opflclear of,cf
646 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* 0x25: AND rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
655/**
656 * @opcode 0x26
657 * @opmnemonic SEG
658 * @op1 ES
659 * @opgroup og_prefix
660 * @openc prefix
661 * @opdisenum OP_SEG
662 * @ophints harmless
663 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* 0x26: ES segment-override prefix — record the prefix and effective
       segment, then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
673
674
675/**
676 * @opcode 0x27
677 * @opfltest af,cf
678 * @opflmodify cf,pf,af,zf,sf,of
679 * @opflundef of
680 */
FNIEMOP_DEF(iemOp_daa)
{
    /* 0x27: DAA — decimal adjust AL after addition; invalid in 64-bit mode; OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
691/**
692 * @opcode 0x28
693 * @opgroup og_gen_arith_bin
694 * @opflmodify cf,pf,af,zf,sf,of
695 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* 0x28: SUB Eb,Gb. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
701
702
703/**
704 * @opcode 0x29
705 * @opgroup og_gen_arith_bin
706 * @opflmodify cf,pf,af,zf,sf,of
707 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* 0x29: SUB Ev,Gv. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
713
714
715/**
716 * @opcode 0x2a
717 * @opgroup og_gen_arith_bin
718 * @opflmodify cf,pf,af,zf,sf,of
719 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* 0x2a: SUB Gb,Eb — register destination, no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
725
726
727/**
728 * @opcode 0x2b
729 * @opgroup og_gen_arith_bin
730 * @opflmodify cf,pf,af,zf,sf,of
731 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* 0x2b: SUB Gv,Ev. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
737
738
739/**
740 * @opcode 0x2c
741 * @opgroup og_gen_arith_bin
742 * @opflmodify cf,pf,af,zf,sf,of
743 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* 0x2c: SUB AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
749
750
751/**
752 * @opcode 0x2d
753 * @opgroup og_gen_arith_bin
754 * @opflmodify cf,pf,af,zf,sf,of
755 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* 0x2d: SUB rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
763/**
764 * @opcode 0x2e
765 * @opmnemonic SEG
766 * @op1 CS
767 * @opgroup og_prefix
768 * @openc prefix
769 * @opdisenum OP_SEG
770 * @ophints harmless
771 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* 0x2e: CS segment-override prefix — record it, then dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
781
782
783/**
784 * @opcode 0x2f
785 * @opfltest af,cf
786 * @opflmodify cf,pf,af,zf,sf,of
787 * @opflundef of
788 */
FNIEMOP_DEF(iemOp_das)
{
    /* 0x2f: DAS — decimal adjust AL after subtraction; invalid in 64-bit mode; OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
799/**
800 * @opcode 0x30
801 * @opgroup og_gen_arith_bin
802 * @opflmodify cf,pf,af,zf,sf,of
803 * @opflundef af
804 * @opflclear of,cf
805 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* 0x30: XOR Eb,Gb — AF undefined after XOR, hence the verification mask. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
812
813
814/**
815 * @opcode 0x31
816 * @opgroup og_gen_arith_bin
817 * @opflmodify cf,pf,af,zf,sf,of
818 * @opflundef af
819 * @opflclear of,cf
820 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* 0x31: XOR Ev,Gv. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
827
828
829/**
830 * @opcode 0x32
831 * @opgroup og_gen_arith_bin
832 * @opflmodify cf,pf,af,zf,sf,of
833 * @opflundef af
834 * @opflclear of,cf
835 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* 0x32: XOR Gb,Eb — register destination, no LOCK hint. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
842
843
844/**
845 * @opcode 0x33
846 * @opgroup og_gen_arith_bin
847 * @opflmodify cf,pf,af,zf,sf,of
848 * @opflundef af
849 * @opflclear of,cf
850 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* 0x33: XOR Gv,Ev. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
857
858
859/**
860 * @opcode 0x34
861 * @opgroup og_gen_arith_bin
862 * @opflmodify cf,pf,af,zf,sf,of
863 * @opflundef af
864 * @opflclear of,cf
865 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* 0x34: XOR AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
889/**
890 * @opcode 0x36
891 * @opmnemonic SEG
892 * @op1 SS
893 * @opgroup og_prefix
894 * @openc prefix
895 * @opdisenum OP_SEG
896 * @ophints harmless
897 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* 0x36: SS segment-override prefix — record it, then dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
909/**
910 * @opcode 0x37
911 * @opfltest af,cf
912 * @opflmodify cf,pf,af,zf,sf,of
913 * @opflundef pf,zf,sf,of
914 * @opgroup og_gen_arith_dec
915 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
916 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
917 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
918 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
919 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
920 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
921 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
922 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
925 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
926 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
927 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
928 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
929 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
930 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
931 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
933 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
936 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
937 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
938 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
939 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
940 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
941 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
942 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
943 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
944 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
945 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
946 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* 0x37: AAA — ASCII adjust AL after addition; invalid in 64-bit mode.
       NOTE(review): the doc block above declares pf,zf,sf,of undefined, but only
       OF is masked here — confirm whether the other flags should be added. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
958/**
959 * @opcode 0x38
960 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* 0x38: CMP Eb,Gb.  NOTE(review): still uses the legacy IEMOP_MNEMONIC
       macro and lacks the @op doc tags the 0x00-0x35 handlers carry. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
966
967
968/**
969 * @opcode 0x39
970 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* 0x39: CMP Ev,Gv (legacy mnemonic macro). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
976
977
978/**
979 * @opcode 0x3a
980 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* 0x3a: CMP Gb,Eb (legacy mnemonic macro). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
986
987
988/**
989 * @opcode 0x3b
990 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* 0x3b: CMP Gv,Ev (legacy mnemonic macro). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
996
997
998/**
999 * @opcode 0x3c
1000 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* 0x3c: CMP AL,Ib (legacy mnemonic macro). */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
1006
1007
1008/**
1009 * @opcode 0x3d
1010 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* 0x3d: CMP rAX,Iz (legacy mnemonic macro). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
1018/**
1019 * @opcode 0x3e
1020 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* 0x3e: DS segment-override prefix — record it, then dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
1085/**
1086 * Common 'inc/dec/not/neg register' helper.
1087 */
/* Applies a unary operator (inc/dec/not/neg) to general register iReg at the
   current effective operand size; the 32-bit case zero-extends per AMD64 rules. */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit register writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* not reached; keeps compilers happy */
}
1129
1130
1131/**
1132 * @opcode 0x40
1133 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (0x40): no extension bits set; just record the prefix and re-decode. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1151
1152
1153/**
1154 * @opcode 0x41
1155 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B (0x41): extends ModRM.rm / SIB.base / opcode-embedded register. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1174
1175
1176/**
1177 * @opcode 0x42
1178 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X (0x42): extends the SIB.index field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1197
1198
1199
1200/**
1201 * @opcode 0x43
1202 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX (0x43): extends both ModRM.rm/SIB.base and SIB.index. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1222
1223
1224/**
1225 * @opcode 0x44
1226 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R (0x44): extends the ModRM.reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1245
1246
1247/**
1248 * @opcode 0x45
1249 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB (0x45): extends ModRM.reg and ModRM.rm/SIB.base. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1269
1270
1271/**
1272 * @opcode 0x46
1273 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX (0x46): extends ModRM.reg and SIB.index. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1293
1294
1295/**
1296 * @opcode 0x47
1297 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX (0x47): extends ModRM.reg, ModRM.rm/SIB.base and SIB.index. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1318
1319
1320/**
1321 * @opcode 0x48
1322 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W (0x48): selects 64-bit operand size; recalc the effective op size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1341
1342
1343/**
1344 * @opcode 0x49
1345 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW (0x49): REX.B extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1365
1366
1367/**
1368 * @opcode 0x4a
1369 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW (0x4a): SIB.index extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1389
1390
1391/**
1392 * @opcode 0x4b
1393 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW (0x4b): B and X extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1414
1415
1416/**
1417 * @opcode 0x4c
1418 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW (0x4c): ModRM.reg extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1438
1439
1440/**
1441 * @opcode 0x4d
1442 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW (0x4d): R and B extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1463
1464
1465/**
1466 * @opcode 0x4e
1467 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW (0x4e): R and X extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1488
1489
1490/**
1491 * @opcode 0x4f
1492 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW (0x4f): all three register extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1514
1515
1516/**
1517 * Common 'push register' helper.
1518 */
/* Pushes general register iReg at the current effective operand size.  In
   64-bit mode PUSH defaults to 64-bit operands (0x66 gives 16-bit; 32-bit is
   not encodable) and REX.B extends the register number. */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1561
1562
1563/**
1564 * @opcode 0x50
1565 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1571
1572
1573/**
1574 * @opcode 0x51
1575 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1581
1582
1583/**
1584 * @opcode 0x52
1585 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1591
1592
1593/**
1594 * @opcode 0x53
1595 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1601
1602
1603/**
1604 * @opcode 0x54
1605 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086 quirk: PUSH SP stores the value of SP *after* the decrement, i.e.
       SP-2; 80286 and later push the pre-decrement value (common path below).
       IEM_MC_ADVANCE_RIP() returns, so the 8086 block does not fall through. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1621
1622
1623/**
1624 * @opcode 0x55
1625 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1631
1632
1633/**
1634 * @opcode 0x56
1635 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1641
1642
1643/**
1644 * @opcode 0x57
1645 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1651
1652
1653/**
1654 * Common 'pop register' helper.
1655 */
/* Pops the stack into general register iReg at the current effective operand
   size.  In 64-bit mode POP defaults to 64-bit operands (0x66 gives 16-bit)
   and REX.B extends the register number. */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1699
1700
1701/**
1702 * @opcode 0x58
1703 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1709
1710
1711/**
1712 * @opcode 0x59
1713 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1719
1720
1721/**
1722 * @opcode 0x5a
1723 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1729
1730
1731/**
1732 * @opcode 0x5b
1733 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1739
1740
1741/**
1742 * @opcode 0x5c
1743 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* POP SP/ESP/RSP is special: the popped value must replace the stack
       pointer, so the value is read into a local first and only then stored
       (the common helper's reference-then-pop pattern would be wrong here).
       With REX.B the destination is r12, so the common helper is fine. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1790
1791
1792/**
1793 * @opcode 0x5d
1794 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1800
1801
1802/**
1803 * @opcode 0x5e
1804 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1810
1811
1812/**
1813 * @opcode 0x5f
1814 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1820
1821
1822/**
1823 * @opcode 0x60
1824 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation matching the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1835
1836
1837/**
1838 * @opcode 0x61
1839 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA outside 64-bit mode; in 64-bit mode it is the (unsupported)
       MVEX prefix and raises #UD. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1856
1857
1858/**
1859 * @opcode 0x62
1860 * @opmnemonic bound
1861 * @op1 Gv_RO
1862 * @op2 Ma
1863 * @opmincpu 80186
1864 * @ophints harmless invalid_64
1865 * @optest op1=0 op2=0 ->
1866 * @optest op1=1 op2=0 -> value.xcpt=5
1867 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1868 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1869 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1870 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1871 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1872 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1873 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1874 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1875 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1876 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1880 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1889 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1890 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1892 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1893 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1894 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1895 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1896 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1897 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1898 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1902 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1909 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair lives in memory as two consecutive words: lower at +0, upper at +2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair lives in memory as two consecutive dwords: lower at +0, upper at +4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX path: consume the remaining two payload bytes, then bail out. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1997
1998
1999/** Opcode 0x63 - non-64-bit modes. */
/* ARPL Ew,Gw - adjusts the RPL of the destination selector word; protected
   mode only (0x63 decodes as MOVSXD in 64-bit mode, handled elsewhere). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        /* Destination is read-modify-write in memory: map it, operate, then commit. */
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2048
2049
2050/**
2051 * @opcode 0x63
2052 *
2053 * @note This is a weird one. It works like a regular move instruction if
2054 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2055 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the recommended REX.W form (sign-extend r/m32 into r64) is
       implemented; other operand sizes deliberately assert-fail below. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);

    return VINF_SUCCESS;
}
2099
2100
2101/**
2102 * @opcode 0x64
2103 * @opmnemonic segfs
2104 * @opmincpu 80386
2105 * @opgroup og_prefixes
2106 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and restart decoding at the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2118
2119
2120/**
2121 * @opcode 0x65
2122 * @opmnemonic seggs
2123 * @opmincpu 80386
2124 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix, switches the effective
 * segment to GS, then decodes and dispatches the following opcode byte.
2125 */
2126FNIEMOP_DEF(iemOp_seg_GS)
2127{
2128 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2129 IEMOP_HLP_MIN_386();
2130
2131 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2132 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2133
 /* Tail-dispatch the instruction that follows the prefix. */
2134 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2135 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2136}
2137
2138
2139/**
2140 * @opcode 0x66
2141 * @opmnemonic opsize
2142 * @openc prefix
2143 * @opmincpu 80386
2144 * @ophints harmless
2145 * @opgroup og_prefixes
 *
 * Operand-size override prefix: flips the effective operand size and
 * re-dispatches the following opcode byte.
2146 */
2147FNIEMOP_DEF(iemOp_op_size)
2148{
2149 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2150 IEMOP_HLP_MIN_386();
2151
2152 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2153 iemRecalEffOpSize(pVCpu);
2154
2155 /* For the 4 entry opcode tables, the operand prefix doesn't count
2156 when REPZ or REPNZ are present. */
2157 if (pVCpu->iem.s.idxPrefix == 0)
2158 pVCpu->iem.s.idxPrefix = 1;
2159
 /* Tail-dispatch the instruction that follows the prefix. */
2160 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2161 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2162}
2163
2164
2165/**
2166 * @opcode 0x67
2167 * @opmnemonic addrsize
2168 * @openc prefix
2169 * @opmincpu 80386
2170 * @ophints harmless
2171 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode
 * (16<->32 in legacy modes, 64->32 in long mode) and re-dispatches the
 * following opcode byte.
2172 */
2173FNIEMOP_DEF(iemOp_addr_size)
2174{
2175 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2176 IEMOP_HLP_MIN_386();
2177
2178 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2179 switch (pVCpu->iem.s.enmDefAddrMode)
2180 {
2181 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2182 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2183 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2184 default: AssertFailed();
2185 }
2186
 /* Tail-dispatch the instruction that follows the prefix. */
2187 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2188 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2189}
2190
2191
2192/**
2193 * @opcode 0x68
 *
 * PUSH Iz - push a word/dword immediate; in 64-bit mode the immediate is
 * 32 bits sign-extended to 64 (default operand size is forced to 64-bit).
2194 */
2195FNIEMOP_DEF(iemOp_push_Iz)
2196{
2197 IEMOP_MNEMONIC(push_Iz, "push Iz");
2198 IEMOP_HLP_MIN_186();
2199 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2200 switch (pVCpu->iem.s.enmEffOpSize)
2201 {
2202 case IEMMODE_16BIT:
2203 {
2204 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2206 IEM_MC_BEGIN(0,0);
2207 IEM_MC_PUSH_U16(u16Imm);
2208 IEM_MC_ADVANCE_RIP();
2209 IEM_MC_END();
2210 return VINF_SUCCESS;
2211 }
2212
2213 case IEMMODE_32BIT:
2214 {
2215 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2217 IEM_MC_BEGIN(0,0);
2218 IEM_MC_PUSH_U32(u32Imm);
2219 IEM_MC_ADVANCE_RIP();
2220 IEM_MC_END();
2221 return VINF_SUCCESS;
2222 }
2223
2224 case IEMMODE_64BIT:
2225 {
 /* Note: only a 32-bit immediate is fetched, sign-extended to 64 bits. */
2226 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2228 IEM_MC_BEGIN(0,0);
2229 IEM_MC_PUSH_U64(u64Imm);
2230 IEM_MC_ADVANCE_RIP();
2231 IEM_MC_END();
2232 return VINF_SUCCESS;
2233 }
2234
2235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2236 }
2237}
2238
2239
2240/**
2241 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply: Gv = Ev * Iz.
 * SF/ZF/AF/PF are architecturally undefined after IMUL; CF/OF are set by
 * the g_iemAImpl_imul_two_* worker.
2242 */
2243FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2244{
2245 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2246 IEMOP_HLP_MIN_186();
2247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2248 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2249
2250 switch (pVCpu->iem.s.enmEffOpSize)
2251 {
2252 case IEMMODE_16BIT:
2253 {
2254 if (IEM_IS_MODRM_REG_MODE(bRm))
2255 {
2256 /* register operand */
2257 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2259
2260 IEM_MC_BEGIN(3, 1);
2261 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2262 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2263 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2264 IEM_MC_LOCAL(uint16_t, u16Tmp);
2265
2266 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2267 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2268 IEM_MC_REF_EFLAGS(pEFlags);
2269 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2270 pu16Dst, u16Src, pEFlags);
2271 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2272
2273 IEM_MC_ADVANCE_RIP();
2274 IEM_MC_END();
2275 }
2276 else
2277 {
2278 /* memory operand */
2279 IEM_MC_BEGIN(3, 2);
2280 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2281 IEM_MC_ARG(uint16_t, u16Src, 1);
2282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2283 IEM_MC_LOCAL(uint16_t, u16Tmp);
2284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2285
 /* 3rd arg = immediate bytes still to be fetched (presumably for
 RIP-relative addressing) - TODO confirm against macro definition. */
2286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2287 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2288 IEM_MC_ASSIGN(u16Src, u16Imm);
2289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2290 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2291 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2292 IEM_MC_REF_EFLAGS(pEFlags);
2293 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2294 pu16Dst, u16Src, pEFlags);
2295 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2296
2297 IEM_MC_ADVANCE_RIP();
2298 IEM_MC_END();
2299 }
2300 return VINF_SUCCESS;
2301 }
2302
2303 case IEMMODE_32BIT:
2304 {
2305 if (IEM_IS_MODRM_REG_MODE(bRm))
2306 {
2307 /* register operand */
2308 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2310
2311 IEM_MC_BEGIN(3, 1);
2312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2313 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2315 IEM_MC_LOCAL(uint32_t, u32Tmp);
2316
2317 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2318 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2319 IEM_MC_REF_EFLAGS(pEFlags);
2320 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2321 pu32Dst, u32Src, pEFlags);
2322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 else
2328 {
2329 /* memory operand */
2330 IEM_MC_BEGIN(3, 2);
2331 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2332 IEM_MC_ARG(uint32_t, u32Src, 1);
2333 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2334 IEM_MC_LOCAL(uint32_t, u32Tmp);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2338 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2339 IEM_MC_ASSIGN(u32Src, u32Imm);
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2342 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2343 IEM_MC_REF_EFLAGS(pEFlags);
2344 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2345 pu32Dst, u32Src, pEFlags);
2346 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2347
2348 IEM_MC_ADVANCE_RIP();
2349 IEM_MC_END();
2350 }
2351 return VINF_SUCCESS;
2352 }
2353
2354 case IEMMODE_64BIT:
2355 {
2356 if (IEM_IS_MODRM_REG_MODE(bRm))
2357 {
2358 /* register operand */
 /* Note: the 64-bit immediate is a sign-extended 32-bit fetch. */
2359 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361
2362 IEM_MC_BEGIN(3, 1);
2363 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2364 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2365 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2366 IEM_MC_LOCAL(uint64_t, u64Tmp);
2367
2368 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2369 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2370 IEM_MC_REF_EFLAGS(pEFlags);
2371 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2372 pu64Dst, u64Src, pEFlags);
2373 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2374
2375 IEM_MC_ADVANCE_RIP();
2376 IEM_MC_END();
2377 }
2378 else
2379 {
2380 /* memory operand */
2381 IEM_MC_BEGIN(3, 2);
2382 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2383 IEM_MC_ARG(uint64_t, u64Src, 1);
2384 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2385 IEM_MC_LOCAL(uint64_t, u64Tmp);
2386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2389 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2390 IEM_MC_ASSIGN(u64Src, u64Imm);
2391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2392 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2393 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2394 IEM_MC_REF_EFLAGS(pEFlags);
2395 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2396 pu64Dst, u64Src, pEFlags);
2397 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2398
2399 IEM_MC_ADVANCE_RIP();
2400 IEM_MC_END();
2401 }
2402 return VINF_SUCCESS;
2403 }
2404 }
2405 AssertFailedReturn(VERR_IEM_IPE_9);
2406}
2407
2408
2409/**
2410 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate; the signed i8Imm widens
 * implicitly to the effective push width (default 64-bit in long mode).
2411 */
2412FNIEMOP_DEF(iemOp_push_Ib)
2413{
2414 IEMOP_MNEMONIC(push_Ib, "push Ib");
2415 IEMOP_HLP_MIN_186();
2416 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2419
2420 IEM_MC_BEGIN(0,0);
2421 switch (pVCpu->iem.s.enmEffOpSize)
2422 {
2423 case IEMMODE_16BIT:
2424 IEM_MC_PUSH_U16(i8Imm);
2425 break;
2426 case IEMMODE_32BIT:
2427 IEM_MC_PUSH_U32(i8Imm);
2428 break;
2429 case IEMMODE_64BIT:
2430 IEM_MC_PUSH_U64(i8Imm);
2431 break;
2432 }
2433 IEM_MC_ADVANCE_RIP();
2434 IEM_MC_END();
2435 return VINF_SUCCESS;
2436}
2437
2438
2439/**
2440 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * (sign-extended Ib).  SF/ZF/AF/PF are undefined.
2441 */
2442FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2443{
2444 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
2445 IEMOP_HLP_MIN_186();
2446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2447 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2448
2449 switch (pVCpu->iem.s.enmEffOpSize)
2450 {
2451 case IEMMODE_16BIT:
2452 if (IEM_IS_MODRM_REG_MODE(bRm))
2453 {
2454 /* register operand */
2455 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2457
2458 IEM_MC_BEGIN(3, 1);
2459 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2460 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2461 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2462 IEM_MC_LOCAL(uint16_t, u16Tmp);
2463
2464 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2465 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2466 IEM_MC_REF_EFLAGS(pEFlags);
2467 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2468 pu16Dst, u16Src, pEFlags);
2469 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2470
2471 IEM_MC_ADVANCE_RIP();
2472 IEM_MC_END();
2473 }
2474 else
2475 {
2476 /* memory operand */
2477 IEM_MC_BEGIN(3, 2);
2478 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2479 IEM_MC_ARG(uint16_t, u16Src, 1);
2480 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2481 IEM_MC_LOCAL(uint16_t, u16Tmp);
2482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2483
 /* 3rd arg = one immediate byte still to be fetched (presumably for
 RIP-relative addressing) - TODO confirm against macro definition. */
2484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2485 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2486 IEM_MC_ASSIGN(u16Src, u16Imm);
2487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2488 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2489 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2490 IEM_MC_REF_EFLAGS(pEFlags);
2491 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2492 pu16Dst, u16Src, pEFlags);
2493 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2494
2495 IEM_MC_ADVANCE_RIP();
2496 IEM_MC_END();
2497 }
2498 return VINF_SUCCESS;
2499
2500 case IEMMODE_32BIT:
2501 if (IEM_IS_MODRM_REG_MODE(bRm))
2502 {
2503 /* register operand */
2504 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2506
2507 IEM_MC_BEGIN(3, 1);
2508 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2509 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2510 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2511 IEM_MC_LOCAL(uint32_t, u32Tmp);
2512
2513 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2514 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2515 IEM_MC_REF_EFLAGS(pEFlags);
2516 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2517 pu32Dst, u32Src, pEFlags);
2518 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2519
2520 IEM_MC_ADVANCE_RIP();
2521 IEM_MC_END();
2522 }
2523 else
2524 {
2525 /* memory operand */
2526 IEM_MC_BEGIN(3, 2);
2527 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2528 IEM_MC_ARG(uint32_t, u32Src, 1);
2529 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2530 IEM_MC_LOCAL(uint32_t, u32Tmp);
2531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2532
2533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2534 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2535 IEM_MC_ASSIGN(u32Src, u32Imm);
2536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2537 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2538 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2539 IEM_MC_REF_EFLAGS(pEFlags);
2540 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2541 pu32Dst, u32Src, pEFlags);
2542 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2543
2544 IEM_MC_ADVANCE_RIP();
2545 IEM_MC_END();
2546 }
2547 return VINF_SUCCESS;
2548
2549 case IEMMODE_64BIT:
2550 if (IEM_IS_MODRM_REG_MODE(bRm))
2551 {
2552 /* register operand */
2553 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555
2556 IEM_MC_BEGIN(3, 1);
2557 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2558 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2559 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2560 IEM_MC_LOCAL(uint64_t, u64Tmp);
2561
2562 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2563 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2564 IEM_MC_REF_EFLAGS(pEFlags);
2565 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2566 pu64Dst, u64Src, pEFlags);
2567 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2568
2569 IEM_MC_ADVANCE_RIP();
2570 IEM_MC_END();
2571 }
2572 else
2573 {
2574 /* memory operand */
2575 IEM_MC_BEGIN(3, 2);
2576 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2577 IEM_MC_ARG(uint64_t, u64Src, 1);
2578 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2579 IEM_MC_LOCAL(uint64_t, u64Tmp);
2580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2581
2582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2583 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2584 IEM_MC_ASSIGN(u64Src, u64Imm);
2585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2586 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2587 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2588 IEM_MC_REF_EFLAGS(pEFlags);
2589 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2590 pu64Dst, u64Src, pEFlags);
2591 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2592
2593 IEM_MC_ADVANCE_RIP();
2594 IEM_MC_END();
2595 }
2596 return VINF_SUCCESS;
2597 }
2598 AssertFailedReturn(VERR_IEM_IPE_8);
2599}
2600
2601
2602/**
2603 * @opcode 0x6c
 *
 * INS/REP INS byte form - input byte(s) from port DX to ES:[rDI].
 * Deferred to C implementations selected by REP prefix and address size;
 * the trailing 'false' argument is passed through to the C worker.
2604 */
2605FNIEMOP_DEF(iemOp_insb_Yb_DX)
2606{
2607 IEMOP_HLP_MIN_186();
2608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2609 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2610 {
2611 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2612 switch (pVCpu->iem.s.enmEffAddrMode)
2613 {
2614 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2615 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2616 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2618 }
2619 }
2620 else
2621 {
2622 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2623 switch (pVCpu->iem.s.enmEffAddrMode)
2624 {
2625 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2626 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2627 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2629 }
2630 }
2631}
2632
2633
2634/**
2635 * @opcode 0x6d
 *
 * INS/REP INS word/dword form - input from port DX to ES:[rDI].  The
 * 64-bit operand size falls through to the 32-bit worker (no 64-bit port
 * I/O exists).  Workers are selected by REP prefix, op size and addr size.
2636 */
2637FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2638{
2639 IEMOP_HLP_MIN_186();
2640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2641 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2642 {
2643 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2644 switch (pVCpu->iem.s.enmEffOpSize)
2645 {
2646 case IEMMODE_16BIT:
2647 switch (pVCpu->iem.s.enmEffAddrMode)
2648 {
2649 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2650 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2651 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2653 }
2654 break;
2655 case IEMMODE_64BIT:
2656 case IEMMODE_32BIT:
2657 switch (pVCpu->iem.s.enmEffAddrMode)
2658 {
2659 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2663 }
2664 break;
2665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2666 }
2667 }
2668 else
2669 {
2670 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2671 switch (pVCpu->iem.s.enmEffOpSize)
2672 {
2673 case IEMMODE_16BIT:
2674 switch (pVCpu->iem.s.enmEffAddrMode)
2675 {
2676 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2680 }
2681 break;
2682 case IEMMODE_64BIT:
2683 case IEMMODE_32BIT:
2684 switch (pVCpu->iem.s.enmEffAddrMode)
2685 {
2686 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2687 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2688 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2690 }
2691 break;
2692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2693 }
2694 }
2695}
2696
2697
2698/**
2699 * @opcode 0x6e
 *
 * OUTS/REP OUTS byte form - output byte(s) from [seg]:[rSI] to port DX.
 * Unlike INS the source segment is overridable, hence iEffSeg is passed to
 * the C worker; the trailing 'false' argument is passed through as well.
2700 */
2701FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2702{
2703 IEMOP_HLP_MIN_186();
2704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2705 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2706 {
2707 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2708 switch (pVCpu->iem.s.enmEffAddrMode)
2709 {
2710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2714 }
2715 }
2716 else
2717 {
2718 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2719 switch (pVCpu->iem.s.enmEffAddrMode)
2720 {
2721 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2722 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2723 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2725 }
2726 }
2727}
2728
2729
2730/**
2731 * @opcode 0x6f
 *
 * OUTS/REP OUTS word/dword form - output from [seg]:[rSI] to port DX.
 * The 64-bit operand size falls through to the 32-bit worker (no 64-bit
 * port I/O exists).  Workers are selected by REP prefix, op size and
 * address size; the effective (overridable) segment is passed along.
2732 */
2733FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2734{
2735 IEMOP_HLP_MIN_186();
2736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2737 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2738 {
2739 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2740 switch (pVCpu->iem.s.enmEffOpSize)
2741 {
2742 case IEMMODE_16BIT:
2743 switch (pVCpu->iem.s.enmEffAddrMode)
2744 {
2745 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2746 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2747 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2749 }
2750 break;
2751 case IEMMODE_64BIT:
2752 case IEMMODE_32BIT:
2753 switch (pVCpu->iem.s.enmEffAddrMode)
2754 {
2755 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2756 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2757 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2759 }
2760 break;
2761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2762 }
2763 }
2764 else
2765 {
2766 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2767 switch (pVCpu->iem.s.enmEffOpSize)
2768 {
2769 case IEMMODE_16BIT:
2770 switch (pVCpu->iem.s.enmEffAddrMode)
2771 {
2772 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2773 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2774 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2776 }
2777 break;
2778 case IEMMODE_64BIT:
2779 case IEMMODE_32BIT:
2780 switch (pVCpu->iem.s.enmEffAddrMode)
2781 {
2782 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2783 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2784 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2786 }
2787 break;
2788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2789 }
2790 }
2791}
2792
2793
2794/**
2795 * @opcode 0x70
 * JO Jb - jump short (signed 8-bit displacement) if OF=1.
2796 */
2797FNIEMOP_DEF(iemOp_jo_Jb)
2798{
2799 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2800 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2802 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2803
2804 IEM_MC_BEGIN(0, 0);
2805 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2806 IEM_MC_REL_JMP_S8(i8Imm);
2807 } IEM_MC_ELSE() {
2808 IEM_MC_ADVANCE_RIP();
2809 } IEM_MC_ENDIF();
2810 IEM_MC_END();
2811 return VINF_SUCCESS;
2812}
2813
2814
2815/**
2816 * @opcode 0x71
 * JNO Jb - jump short (signed 8-bit displacement) if OF=0.
2817 */
2818FNIEMOP_DEF(iemOp_jno_Jb)
2819{
2820 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2821 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2824
2825 IEM_MC_BEGIN(0, 0);
2826 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2827 IEM_MC_ADVANCE_RIP();
2828 } IEM_MC_ELSE() {
2829 IEM_MC_REL_JMP_S8(i8Imm);
2830 } IEM_MC_ENDIF();
2831 IEM_MC_END();
2832 return VINF_SUCCESS;
2833}
2834
2835/**
2836 * @opcode 0x72
 * JC/JB/JNAE Jb - jump short (signed 8-bit displacement) if CF=1.
2837 */
2838FNIEMOP_DEF(iemOp_jc_Jb)
2839{
2840 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2841 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2844
2845 IEM_MC_BEGIN(0, 0);
2846 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2847 IEM_MC_REL_JMP_S8(i8Imm);
2848 } IEM_MC_ELSE() {
2849 IEM_MC_ADVANCE_RIP();
2850 } IEM_MC_ENDIF();
2851 IEM_MC_END();
2852 return VINF_SUCCESS;
2853}
2854
2855
2856/**
2857 * @opcode 0x73
 * JNC/JNB/JAE Jb - jump short (signed 8-bit displacement) if CF=0.
2858 */
2859FNIEMOP_DEF(iemOp_jnc_Jb)
2860{
2861 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2862 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2864 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2865
2866 IEM_MC_BEGIN(0, 0);
2867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2868 IEM_MC_ADVANCE_RIP();
2869 } IEM_MC_ELSE() {
2870 IEM_MC_REL_JMP_S8(i8Imm);
2871 } IEM_MC_ENDIF();
2872 IEM_MC_END();
2873 return VINF_SUCCESS;
2874}
2875
2876
2877/**
2878 * @opcode 0x74
 * JE/JZ Jb - jump short (signed 8-bit displacement) if ZF=1.
2879 */
2880FNIEMOP_DEF(iemOp_je_Jb)
2881{
2882 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2883 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2886
2887 IEM_MC_BEGIN(0, 0);
2888 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2889 IEM_MC_REL_JMP_S8(i8Imm);
2890 } IEM_MC_ELSE() {
2891 IEM_MC_ADVANCE_RIP();
2892 } IEM_MC_ENDIF();
2893 IEM_MC_END();
2894 return VINF_SUCCESS;
2895}
2896
2897
2898/**
2899 * @opcode 0x75
 * JNE/JNZ Jb - jump short (signed 8-bit displacement) if ZF=0.
2900 */
2901FNIEMOP_DEF(iemOp_jne_Jb)
2902{
2903 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2904 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2907
2908 IEM_MC_BEGIN(0, 0);
2909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2910 IEM_MC_ADVANCE_RIP();
2911 } IEM_MC_ELSE() {
2912 IEM_MC_REL_JMP_S8(i8Imm);
2913 } IEM_MC_ENDIF();
2914 IEM_MC_END();
2915 return VINF_SUCCESS;
2916}
2917
2918
2919/**
2920 * @opcode 0x76
 * JBE/JNA Jb - jump short (signed 8-bit displacement) if CF=1 or ZF=1.
2921 */
2922FNIEMOP_DEF(iemOp_jbe_Jb)
2923{
2924 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2925 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2927 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2928
2929 IEM_MC_BEGIN(0, 0);
2930 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2931 IEM_MC_REL_JMP_S8(i8Imm);
2932 } IEM_MC_ELSE() {
2933 IEM_MC_ADVANCE_RIP();
2934 } IEM_MC_ENDIF();
2935 IEM_MC_END();
2936 return VINF_SUCCESS;
2937}
2938
2939
2940/**
2941 * @opcode 0x77
 * JA/JNBE Jb - jump short (signed 8-bit displacement) if CF=0 and ZF=0.
2942 */
2943FNIEMOP_DEF(iemOp_jnbe_Jb)
2944{
2945 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2946 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2949
2950 IEM_MC_BEGIN(0, 0);
2951 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2952 IEM_MC_ADVANCE_RIP();
2953 } IEM_MC_ELSE() {
2954 IEM_MC_REL_JMP_S8(i8Imm);
2955 } IEM_MC_ENDIF();
2956 IEM_MC_END();
2957 return VINF_SUCCESS;
2958}
2959
2960
2961/**
2962 * @opcode 0x78
 * JS Jb - jump short (signed 8-bit displacement) if SF=1.
2963 */
2964FNIEMOP_DEF(iemOp_js_Jb)
2965{
2966 IEMOP_MNEMONIC(js_Jb, "js Jb");
2967 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2970
2971 IEM_MC_BEGIN(0, 0);
2972 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2973 IEM_MC_REL_JMP_S8(i8Imm);
2974 } IEM_MC_ELSE() {
2975 IEM_MC_ADVANCE_RIP();
2976 } IEM_MC_ENDIF();
2977 IEM_MC_END();
2978 return VINF_SUCCESS;
2979}
2980
2981
2982/**
2983 * @opcode 0x79
 * JNS Jb - jump short (signed 8-bit displacement) if SF=0.
2984 */
2985FNIEMOP_DEF(iemOp_jns_Jb)
2986{
2987 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2988 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2990 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2991
2992 IEM_MC_BEGIN(0, 0);
2993 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2994 IEM_MC_ADVANCE_RIP();
2995 } IEM_MC_ELSE() {
2996 IEM_MC_REL_JMP_S8(i8Imm);
2997 } IEM_MC_ENDIF();
2998 IEM_MC_END();
2999 return VINF_SUCCESS;
3000}
3001
3002
3003/**
3004 * @opcode 0x7a
 * JP/JPE Jb - jump short (signed 8-bit displacement) if PF=1.
3005 */
3006FNIEMOP_DEF(iemOp_jp_Jb)
3007{
3008 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3009 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3012
3013 IEM_MC_BEGIN(0, 0);
3014 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3015 IEM_MC_REL_JMP_S8(i8Imm);
3016 } IEM_MC_ELSE() {
3017 IEM_MC_ADVANCE_RIP();
3018 } IEM_MC_ENDIF();
3019 IEM_MC_END();
3020 return VINF_SUCCESS;
3021}
3022
3023
3024/**
3025 * @opcode 0x7b
 * JNP/JPO Jb - jump short (signed 8-bit displacement) if PF=0.
3026 */
3027FNIEMOP_DEF(iemOp_jnp_Jb)
3028{
3029 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3030 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3032 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3033
3034 IEM_MC_BEGIN(0, 0);
3035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3036 IEM_MC_ADVANCE_RIP();
3037 } IEM_MC_ELSE() {
3038 IEM_MC_REL_JMP_S8(i8Imm);
3039 } IEM_MC_ENDIF();
3040 IEM_MC_END();
3041 return VINF_SUCCESS;
3042}
3043
3044
3045/**
3046 * @opcode 0x7c
 * JL/JNGE Jb - jump short (signed 8-bit displacement) if SF != OF.
3047 */
3048FNIEMOP_DEF(iemOp_jl_Jb)
3049{
3050 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3051 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3053 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3054
3055 IEM_MC_BEGIN(0, 0);
3056 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3057 IEM_MC_REL_JMP_S8(i8Imm);
3058 } IEM_MC_ELSE() {
3059 IEM_MC_ADVANCE_RIP();
3060 } IEM_MC_ENDIF();
3061 IEM_MC_END();
3062 return VINF_SUCCESS;
3063}
3064
3065
3066/**
3067 * @opcode 0x7d
 * JGE/JNL Jb - jump short (signed 8-bit displacement) if SF == OF.
3068 */
3069FNIEMOP_DEF(iemOp_jnl_Jb)
3070{
3071 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3072 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3074 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3075
3076 IEM_MC_BEGIN(0, 0);
3077 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3078 IEM_MC_ADVANCE_RIP();
3079 } IEM_MC_ELSE() {
3080 IEM_MC_REL_JMP_S8(i8Imm);
3081 } IEM_MC_ENDIF();
3082 IEM_MC_END();
3083 return VINF_SUCCESS;
3084}
3085
3086
3087/**
3088 * @opcode 0x7e
 * JLE/JNG Jb - jump short (signed 8-bit displacement) if ZF=1 or SF != OF.
3089 */
3090FNIEMOP_DEF(iemOp_jle_Jb)
3091{
3092 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3093 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3095 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3096
3097 IEM_MC_BEGIN(0, 0);
3098 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3099 IEM_MC_REL_JMP_S8(i8Imm);
3100 } IEM_MC_ELSE() {
3101 IEM_MC_ADVANCE_RIP();
3102 } IEM_MC_ENDIF();
3103 IEM_MC_END();
3104 return VINF_SUCCESS;
3105}
3106
3107
3108/**
3109 * @opcode 0x7f
 * JG/JNLE Jb - jump short (signed 8-bit displacement) if ZF=0 and SF == OF.
3110 */
3111FNIEMOP_DEF(iemOp_jnle_Jb)
3112{
3113 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3114 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3117
3118 IEM_MC_BEGIN(0, 0);
3119 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3120 IEM_MC_ADVANCE_RIP();
3121 } IEM_MC_ELSE() {
3122 IEM_MC_REL_JMP_S8(i8Imm);
3123 } IEM_MC_ENDIF();
3124 IEM_MC_END();
3125 return VINF_SUCCESS;
3126}
3127
3128
3129/**
3130 * @opcode 0x80
 *
 * Group 1, byte form: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.  The ModRM
 * reg field selects the operation via the g_apIemImplGrp1 worker table;
 * CMP (no locked worker) only reads the memory operand.
3131 */
3132FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3133{
3134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3135 switch (IEM_GET_MODRM_REG_8(bRm))
3136 {
3137 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3138 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
3139 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3140 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3141 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3142 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3143 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3144 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3145 }
3146 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3147
3148 if (IEM_IS_MODRM_REG_MODE(bRm))
3149 {
3150 /* register target */
3151 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3153 IEM_MC_BEGIN(3, 0);
3154 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3155 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3156 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3157
3158 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3159 IEM_MC_REF_EFLAGS(pEFlags);
3160 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3161
3162 IEM_MC_ADVANCE_RIP();
3163 IEM_MC_END();
3164 }
3165 else
3166 {
3167 /* memory target */
 /* CMP has no locked worker, so its absence marks a read-only operand. */
3168 uint32_t fAccess;
3169 if (pImpl->pfnLockedU8)
3170 fAccess = IEM_ACCESS_DATA_RW;
3171 else /* CMP */
3172 fAccess = IEM_ACCESS_DATA_R;
3173 IEM_MC_BEGIN(3, 2);
3174 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3177
3178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3179 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3180 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
 /* LOCK is only acceptable for the read-modify-write forms. */
3181 if (pImpl->pfnLockedU8)
3182 IEMOP_HLP_DONE_DECODING();
3183 else
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185
3186 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3187 IEM_MC_FETCH_EFLAGS(EFlags);
3188 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3189 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3190 else
3191 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3192
3193 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3194 IEM_MC_COMMIT_EFLAGS(EFlags);
3195 IEM_MC_ADVANCE_RIP();
3196 IEM_MC_END();
3197 }
3198 return VINF_SUCCESS;
3199}
3200
3201
3202/**
 3203 * @opcode 0x81
 *
 * Group 1 with a full-sized immediate: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 * The ModR/M /reg field selects the operation.  The immediate is 16 or 32
 * bits wide; for 64-bit operand size a 32-bit immediate is fetched and
 * sign-extended to 64 bits (see IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 3204 */
3205FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3206{
3207    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The /reg field of the ModR/M byte picks the operation; this switch only
       records the mnemonic (decode bookkeeping), it does not dispatch. */
3208    switch (IEM_GET_MODRM_REG_8(bRm))
3209    {
3210        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3211        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3212        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3213        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3214        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3215        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3216        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3217        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3218    }
    /* Worker function table entry for the selected operation. */
3219    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3220
3221    switch (pVCpu->iem.s.enmEffOpSize)
3222    {
3223        case IEMMODE_16BIT:
3224        {
3225            if (IEM_IS_MODRM_REG_MODE(bRm))
3226            {
3227                /* register target */
3228                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3229                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230                IEM_MC_BEGIN(3, 0);
3231                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3232                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3233                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3234
3235                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3236                IEM_MC_REF_EFLAGS(pEFlags);
3237                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3238
3239                IEM_MC_ADVANCE_RIP();
3240                IEM_MC_END();
3241            }
3242            else
3243            {
3244                /* memory target */
                /* No locked worker in the table means the op does not write
                   its destination (CMP), so the mapping can be read-only. */
3245                uint32_t fAccess;
3246                if (pImpl->pfnLockedU16)
3247                    fAccess = IEM_ACCESS_DATA_RW;
3248                else /* CMP, TEST */
3249                    fAccess = IEM_ACCESS_DATA_R;
3250                IEM_MC_BEGIN(3, 2);
3251                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3252                IEM_MC_ARG(uint16_t, u16Src, 1);
3253                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3254                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3255
                /* Effective address first, then the immediate; the 3rd
                   parameter (2) matches the immediate size in bytes --
                   presumably for RIP-relative adjustment, confirm in macro. */
3256                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3257                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3258                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* LOCK prefix is only acceptable for ops that have a locked worker. */
3259                if (pImpl->pfnLockedU16)
3260                    IEMOP_HLP_DONE_DECODING();
3261                else
3262                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3263                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3264                IEM_MC_FETCH_EFLAGS(EFlags);
3265                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3266                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3267                else
3268                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3269
3270                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3271                IEM_MC_COMMIT_EFLAGS(EFlags);
3272                IEM_MC_ADVANCE_RIP();
3273                IEM_MC_END();
3274            }
3275            break;
3276        }
3277
3278        case IEMMODE_32BIT:
3279        {
3280            if (IEM_IS_MODRM_REG_MODE(bRm))
3281            {
3282                /* register target */
3283                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3284                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285                IEM_MC_BEGIN(3, 0);
3286                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3287                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3288                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3289
3290                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3291                IEM_MC_REF_EFLAGS(pEFlags);
3292                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit GPR;
                   CMP does not write the destination, so skip it there. */
3293                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3294                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3295
3296                IEM_MC_ADVANCE_RIP();
3297                IEM_MC_END();
3298            }
3299            else
3300            {
3301                /* memory target */
3302                uint32_t fAccess;
3303                if (pImpl->pfnLockedU32)
3304                    fAccess = IEM_ACCESS_DATA_RW;
3305                else /* CMP, TEST */
3306                    fAccess = IEM_ACCESS_DATA_R;
3307                IEM_MC_BEGIN(3, 2);
3308                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3309                IEM_MC_ARG(uint32_t, u32Src, 1);
3310                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3311                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3312
3313                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3314                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3315                IEM_MC_ASSIGN(u32Src, u32Imm);
3316                if (pImpl->pfnLockedU32)
3317                    IEMOP_HLP_DONE_DECODING();
3318                else
3319                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3320                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3321                IEM_MC_FETCH_EFLAGS(EFlags);
3322                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3323                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3324                else
3325                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3326
3327                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3328                IEM_MC_COMMIT_EFLAGS(EFlags);
3329                IEM_MC_ADVANCE_RIP();
3330                IEM_MC_END();
3331            }
3332            break;
3333        }
3334
3335        case IEMMODE_64BIT:
3336        {
3337            if (IEM_IS_MODRM_REG_MODE(bRm))
3338            {
3339                /* register target */
                /* 64-bit form: imm32 sign-extended to 64 bits. */
3340                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3341                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3342                IEM_MC_BEGIN(3, 0);
3343                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3344                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3345                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3346
3347                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3348                IEM_MC_REF_EFLAGS(pEFlags);
3349                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3350
3351                IEM_MC_ADVANCE_RIP();
3352                IEM_MC_END();
3353            }
3354            else
3355            {
3356                /* memory target */
3357                uint32_t fAccess;
3358                if (pImpl->pfnLockedU64)
3359                    fAccess = IEM_ACCESS_DATA_RW;
3360                else /* CMP */
3361                    fAccess = IEM_ACCESS_DATA_R;
3362                IEM_MC_BEGIN(3, 2);
3363                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3364                IEM_MC_ARG(uint64_t, u64Src, 1);
3365                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3366                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3367
                /* Still 4 immediate bytes on the wire even though the
                   operand is 64-bit (imm32 sign-extended). */
3368                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3369                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3370                if (pImpl->pfnLockedU64)
3371                    IEMOP_HLP_DONE_DECODING();
3372                else
3373                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3374                IEM_MC_ASSIGN(u64Src, u64Imm);
3375                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3376                IEM_MC_FETCH_EFLAGS(EFlags);
3377                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3378                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3379                else
3380                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3381
3382                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3383                IEM_MC_COMMIT_EFLAGS(EFlags);
3384                IEM_MC_ADVANCE_RIP();
3385                IEM_MC_END();
3386            }
3387            break;
3388        }
3389    }
3390    return VINF_SUCCESS;
3391}
3392
3393
3394/**
 3395 * @opcode 0x82
 3396 * @opmnemonic grp1_82
 3397 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is only valid outside 64-bit
 * mode; IEMOP_HLP_NO_64BIT rejects it in 64-bit mode.
 3398 */
3399FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3400{
3401    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    /* Otherwise identical to 0x80, so delegate to that handler. */
3402    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3403}
3404
3405
3406/**
 3407 * @opcode 0x83
 *
 * Group 1 with a sign-extended byte immediate:
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.  The ModR/M /reg field selects the
 * operation; the 8-bit immediate is sign-extended to the effective operand
 * size (note the (int8_t) casts below).
 3408 */
3409FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3410{
3411    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The /reg field selects the operation; this only records the mnemonic. */
3412    switch (IEM_GET_MODRM_REG_8(bRm))
3413    {
3414        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3415        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3416        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3417        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3418        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3419        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3420        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3421        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3422    }
3423    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3424       to the 386 even if absent in the intel reference manuals and some
3425       3rd party opcode listings. */
3426    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3427
3428    if (IEM_IS_MODRM_REG_MODE(bRm))
3429    {
3430        /*
3431         * Register target
3432         */
3433        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3434        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3435        switch (pVCpu->iem.s.enmEffOpSize)
3436        {
3437            case IEMMODE_16BIT:
3438            {
3439                IEM_MC_BEGIN(3, 0);
3440                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* Sign-extend imm8 to the 16-bit operand width. */
3441                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3442                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3443
3444                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3445                IEM_MC_REF_EFLAGS(pEFlags);
3446                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3447
3448                IEM_MC_ADVANCE_RIP();
3449                IEM_MC_END();
3450                break;
3451            }
3452
3453            case IEMMODE_32BIT:
3454            {
3455                IEM_MC_BEGIN(3, 0);
3456                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3457                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3458                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3459
3460                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3461                IEM_MC_REF_EFLAGS(pEFlags);
3462                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper GPR half; CMP does
                   not write, so it skips this. */
3463                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3464                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3465
3466                IEM_MC_ADVANCE_RIP();
3467                IEM_MC_END();
3468                break;
3469            }
3470
3471            case IEMMODE_64BIT:
3472            {
3473                IEM_MC_BEGIN(3, 0);
3474                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3475                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3476                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3477
3478                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3479                IEM_MC_REF_EFLAGS(pEFlags);
3480                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3481
3482                IEM_MC_ADVANCE_RIP();
3483                IEM_MC_END();
3484                break;
3485            }
3486        }
3487    }
3488    else
3489    {
3490        /*
3491         * Memory target.
3492         */
        /* Access mode is decided by the 16-bit locked worker for all widths;
           the group-1 table appears to have locked workers either for all
           sizes or for none (only CMP lacks them) -- NOTE(review): confirm. */
3493        uint32_t fAccess;
3494        if (pImpl->pfnLockedU16)
3495            fAccess = IEM_ACCESS_DATA_RW;
3496        else /* CMP */
3497            fAccess = IEM_ACCESS_DATA_R;
3498
3499        switch (pVCpu->iem.s.enmEffOpSize)
3500        {
3501            case IEMMODE_16BIT:
3502            {
3503                IEM_MC_BEGIN(3, 2);
3504                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3505                IEM_MC_ARG(uint16_t, u16Src, 1);
3506                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3507                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3508
                /* One immediate byte follows the ModR/M bytes. */
3509                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3510                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3511                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                /* LOCK is only legal when a locked worker exists. */
3512                if (pImpl->pfnLockedU16)
3513                    IEMOP_HLP_DONE_DECODING();
3514                else
3515                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3516                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3517                IEM_MC_FETCH_EFLAGS(EFlags);
3518                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3519                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3520                else
3521                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3522
3523                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3524                IEM_MC_COMMIT_EFLAGS(EFlags);
3525                IEM_MC_ADVANCE_RIP();
3526                IEM_MC_END();
3527                break;
3528            }
3529
3530            case IEMMODE_32BIT:
3531            {
3532                IEM_MC_BEGIN(3, 2);
3533                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3534                IEM_MC_ARG(uint32_t, u32Src, 1);
3535                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3536                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3537
3538                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3539                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3540                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3541                if (pImpl->pfnLockedU32)
3542                    IEMOP_HLP_DONE_DECODING();
3543                else
3544                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3545                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3546                IEM_MC_FETCH_EFLAGS(EFlags);
3547                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3548                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3549                else
3550                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3551
3552                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3553                IEM_MC_COMMIT_EFLAGS(EFlags);
3554                IEM_MC_ADVANCE_RIP();
3555                IEM_MC_END();
3556                break;
3557            }
3558
3559            case IEMMODE_64BIT:
3560            {
3561                IEM_MC_BEGIN(3, 2);
3562                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3563                IEM_MC_ARG(uint64_t, u64Src, 1);
3564                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3565                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3566
3567                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3568                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3569                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3570                if (pImpl->pfnLockedU64)
3571                    IEMOP_HLP_DONE_DECODING();
3572                else
3573                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3574                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3575                IEM_MC_FETCH_EFLAGS(EFlags);
3576                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3577                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3578                else
3579                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3580
3581                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3582                IEM_MC_COMMIT_EFLAGS(EFlags);
3583                IEM_MC_ADVANCE_RIP();
3584                IEM_MC_END();
3585                break;
3586            }
3587        }
3588    }
3589    return VINF_SUCCESS;
3590}
3591
3592
3593/**
 3594 * @opcode 0x84
 *
 * TEST Eb,Gb - ANDs the operands to set EFLAGS, discarding the result.
 3595 */
3596FNIEMOP_DEF(iemOp_test_Eb_Gb)
3597{
3598    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
3599    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shared "r/m8, r8" binary-operator decoder with the TEST worker table. */
3600    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3601}
3602
3603
3604/**
 3605 * @opcode 0x85
 *
 * TEST Ev,Gv - ANDs the operands to set EFLAGS, discarding the result.
 3606 */
3607FNIEMOP_DEF(iemOp_test_Ev_Gv)
3608{
3609    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
3610    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shared "r/m, r" (16/32/64-bit) binary-operator decoder with the TEST workers. */
3611    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3612}
3613
3614
3615/**
 3616 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchange a byte register with r/m8.  The memory form is
 * performed with the locked worker even without a LOCK prefix (unless the
 * lock-disregard tweak is active), matching XCHG's implicit bus lock.
 3617 */
3618FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3619{
3620    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3621    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3622
3623    /*
3624     * If rm is denoting a register, no more instruction bytes.
3625     */
3626    if (IEM_IS_MODRM_REG_MODE(bRm))
3627    {
3628        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629
3630        IEM_MC_BEGIN(0, 2);
3631        IEM_MC_LOCAL(uint8_t, uTmp1);
3632        IEM_MC_LOCAL(uint8_t, uTmp2);
3633
        /* Plain register swap via two temporaries. */
3634        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3635        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3636        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3637        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3638
3639        IEM_MC_ADVANCE_RIP();
3640        IEM_MC_END();
3641    }
3642    else
3643    {
3644        /*
3645         * We're accessing memory.
3646         */
3647/** @todo the register must be committed separately! */
3648        IEM_MC_BEGIN(2, 2);
3649        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3650        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3651        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3652
3653        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3654        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3655        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with memory is implicitly locked; the unlocked worker is only
           used when the lock-disregard configuration flag is set. */
3656        if (!pVCpu->iem.s.fDisregardLock)
3657            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
3658        else
3659            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
3660        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3661
3662        IEM_MC_ADVANCE_RIP();
3663        IEM_MC_END();
3664    }
3665    return VINF_SUCCESS;
3666}
3667
3668
3669/**
 3670 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchange a 16/32/64-bit register with r/m.  The memory form
 * uses the locked worker even without a LOCK prefix (unless the
 * lock-disregard tweak is active), matching XCHG's implicit bus lock.
 3671 */
3672FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3673{
3674    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3675    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3676
3677    /*
3678     * If rm is denoting a register, no more instruction bytes.
3679     */
3680    if (IEM_IS_MODRM_REG_MODE(bRm))
3681    {
3682        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3683
3684        switch (pVCpu->iem.s.enmEffOpSize)
3685        {
3686            case IEMMODE_16BIT:
3687                IEM_MC_BEGIN(0, 2);
3688                IEM_MC_LOCAL(uint16_t, uTmp1);
3689                IEM_MC_LOCAL(uint16_t, uTmp2);
3690
3691                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3692                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3693                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3694                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3695
3696                IEM_MC_ADVANCE_RIP();
3697                IEM_MC_END();
3698                return VINF_SUCCESS;
3699
3700            case IEMMODE_32BIT:
                /* Note: IEM_MC_STORE_GREG_U32 handles the zeroing of the
                   upper 64-bit register half implied by 32-bit writes --
                   NOTE(review): implied by the absence of an explicit clear
                   here; confirm in the macro definition. */
3701                IEM_MC_BEGIN(0, 2);
3702                IEM_MC_LOCAL(uint32_t, uTmp1);
3703                IEM_MC_LOCAL(uint32_t, uTmp2);
3704
3705                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3706                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3707                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3708                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3709
3710                IEM_MC_ADVANCE_RIP();
3711                IEM_MC_END();
3712                return VINF_SUCCESS;
3713
3714            case IEMMODE_64BIT:
3715                IEM_MC_BEGIN(0, 2);
3716                IEM_MC_LOCAL(uint64_t, uTmp1);
3717                IEM_MC_LOCAL(uint64_t, uTmp2);
3718
3719                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3720                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3721                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3722                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3723
3724                IEM_MC_ADVANCE_RIP();
3725                IEM_MC_END();
3726                return VINF_SUCCESS;
3727
3728            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3729        }
3730    }
3731    else
3732    {
3733        /*
3734         * We're accessing memory.
3735         */
3736        switch (pVCpu->iem.s.enmEffOpSize)
3737        {
3738/** @todo the register must be committed separately! */
3739            case IEMMODE_16BIT:
3740                IEM_MC_BEGIN(2, 2);
3741                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3742                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3743                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3744
3745                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3746                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3747                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* XCHG with memory is implicitly locked; the unlocked worker
                   is only used when lock-disregard is configured. */
3748                if (!pVCpu->iem.s.fDisregardLock)
3749                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3750                else
3751                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3752                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3753
3754                IEM_MC_ADVANCE_RIP();
3755                IEM_MC_END();
3756                return VINF_SUCCESS;
3757
3758            case IEMMODE_32BIT:
3759                IEM_MC_BEGIN(2, 2);
3760                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3761                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3762                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3763
3764                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3765                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3766                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3767                if (!pVCpu->iem.s.fDisregardLock)
3768                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3769                else
3770                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3771                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3772
                /* The register side was written via reference; zero the upper
                   half as required for 32-bit register writes. */
3773                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3774                IEM_MC_ADVANCE_RIP();
3775                IEM_MC_END();
3776                return VINF_SUCCESS;
3777
3778            case IEMMODE_64BIT:
3779                IEM_MC_BEGIN(2, 2);
3780                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3781                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3782                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3783
3784                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3785                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3786                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3787                if (!pVCpu->iem.s.fDisregardLock)
3788                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3789                else
3790                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3791                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3792
3793                IEM_MC_ADVANCE_RIP();
3794                IEM_MC_END();
3795                return VINF_SUCCESS;
3796
3797            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3798        }
3799    }
3800}
3801
3802
3803/**
 3804 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register into r/m8.
 3805 */
3806FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3807{
3808    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3809
3810    uint8_t bRm;
3811    IEM_OPCODE_GET_NEXT_U8(&bRm);
3812
3813    /*
3814     * If rm is denoting a register, no more instruction bytes.
3815     */
3816    if (IEM_IS_MODRM_REG_MODE(bRm))
3817    {
3818        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819        IEM_MC_BEGIN(0, 1);
3820        IEM_MC_LOCAL(uint8_t, u8Value);
3821        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3822        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
3823        IEM_MC_ADVANCE_RIP();
3824        IEM_MC_END();
3825    }
3826    else
3827    {
3828        /*
3829         * We're writing a register to memory.
3830         */
3831        IEM_MC_BEGIN(0, 2);
3832        IEM_MC_LOCAL(uint8_t, u8Value);
3833        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address completes the decoding (no immediate follows). */
3834        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3835        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3837        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3838        IEM_MC_ADVANCE_RIP();
3839        IEM_MC_END();
3840    }
3841    return VINF_SUCCESS;
3842
3843}
3844
3845
3846/**
 3847 * @opcode 0x89
 *
 * MOV Ev,Gv - store a 16/32/64-bit register into r/m.
 3848 */
3849FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3850{
3851    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3852
3853    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3854
3855    /*
3856     * If rm is denoting a register, no more instruction bytes.
3857     */
3858    if (IEM_IS_MODRM_REG_MODE(bRm))
3859    {
3860        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861        switch (pVCpu->iem.s.enmEffOpSize)
3862        {
3863            case IEMMODE_16BIT:
3864                IEM_MC_BEGIN(0, 1);
3865                IEM_MC_LOCAL(uint16_t, u16Value);
3866                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3867                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
3868                IEM_MC_ADVANCE_RIP();
3869                IEM_MC_END();
3870                break;
3871
3872            case IEMMODE_32BIT:
3873                IEM_MC_BEGIN(0, 1);
3874                IEM_MC_LOCAL(uint32_t, u32Value);
3875                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3876                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
3877                IEM_MC_ADVANCE_RIP();
3878                IEM_MC_END();
3879                break;
3880
3881            case IEMMODE_64BIT:
3882                IEM_MC_BEGIN(0, 1);
3883                IEM_MC_LOCAL(uint64_t, u64Value);
3884                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3885                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
3886                IEM_MC_ADVANCE_RIP();
3887                IEM_MC_END();
3888                break;
3889        }
3890    }
3891    else
3892    {
3893        /*
3894         * We're writing a register to memory.
3895         */
3896        switch (pVCpu->iem.s.enmEffOpSize)
3897        {
3898            case IEMMODE_16BIT:
3899                IEM_MC_BEGIN(0, 2);
3900                IEM_MC_LOCAL(uint16_t, u16Value);
3901                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3902                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3903                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3904                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3905                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3906                IEM_MC_ADVANCE_RIP();
3907                IEM_MC_END();
3908                break;
3909
3910            case IEMMODE_32BIT:
3911                IEM_MC_BEGIN(0, 2);
3912                IEM_MC_LOCAL(uint32_t, u32Value);
3913                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3914                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3915                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3916                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3917                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3918                IEM_MC_ADVANCE_RIP();
3919                IEM_MC_END();
3920                break;
3921
3922            case IEMMODE_64BIT:
3923                IEM_MC_BEGIN(0, 2);
3924                IEM_MC_LOCAL(uint64_t, u64Value);
3925                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3926                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3927                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3929                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3930                IEM_MC_ADVANCE_RIP();
3931                IEM_MC_END();
3932                break;
3933        }
3934    }
3935    return VINF_SUCCESS;
3936}
3937
3938
3939/**
 3940 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from r/m8.
 3941 */
3942FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3943{
3944    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3945
3946    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3947
3948    /*
3949     * If rm is denoting a register, no more instruction bytes.
3950     */
3951    if (IEM_IS_MODRM_REG_MODE(bRm))
3952    {
3953        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3954        IEM_MC_BEGIN(0, 1);
3955        IEM_MC_LOCAL(uint8_t, u8Value);
3956        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3957        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3958        IEM_MC_ADVANCE_RIP();
3959        IEM_MC_END();
3960    }
3961    else
3962    {
3963        /*
3964         * We're loading a register from memory.
3965         */
3966        IEM_MC_BEGIN(0, 2);
3967        IEM_MC_LOCAL(uint8_t, u8Value);
3968        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3969        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3970        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3971        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3972        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3973        IEM_MC_ADVANCE_RIP();
3974        IEM_MC_END();
3975    }
3976    return VINF_SUCCESS;
3977}
3978
3979
3980/**
 3981 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a 16/32/64-bit register from r/m.  Also reused by the
 * opcode 0x63 dispatcher for non-64-bit-operand MOVSXD.
 3982 */
3983FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3984{
3985    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3986
3987    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3988
3989    /*
3990     * If rm is denoting a register, no more instruction bytes.
3991     */
3992    if (IEM_IS_MODRM_REG_MODE(bRm))
3993    {
3994        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3995        switch (pVCpu->iem.s.enmEffOpSize)
3996        {
3997            case IEMMODE_16BIT:
3998                IEM_MC_BEGIN(0, 1);
3999                IEM_MC_LOCAL(uint16_t, u16Value);
4000                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4001                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4002                IEM_MC_ADVANCE_RIP();
4003                IEM_MC_END();
4004                break;
4005
4006            case IEMMODE_32BIT:
4007                IEM_MC_BEGIN(0, 1);
4008                IEM_MC_LOCAL(uint32_t, u32Value);
4009                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4010                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4011                IEM_MC_ADVANCE_RIP();
4012                IEM_MC_END();
4013                break;
4014
4015            case IEMMODE_64BIT:
4016                IEM_MC_BEGIN(0, 1);
4017                IEM_MC_LOCAL(uint64_t, u64Value);
4018                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4019                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4020                IEM_MC_ADVANCE_RIP();
4021                IEM_MC_END();
4022                break;
4023        }
4024    }
4025    else
4026    {
4027        /*
4028         * We're loading a register from memory.
4029         */
4030        switch (pVCpu->iem.s.enmEffOpSize)
4031        {
4032            case IEMMODE_16BIT:
4033                IEM_MC_BEGIN(0, 2);
4034                IEM_MC_LOCAL(uint16_t, u16Value);
4035                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4036                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4037                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4038                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4039                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4040                IEM_MC_ADVANCE_RIP();
4041                IEM_MC_END();
4042                break;
4043
4044            case IEMMODE_32BIT:
4045                IEM_MC_BEGIN(0, 2);
4046                IEM_MC_LOCAL(uint32_t, u32Value);
4047                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4048                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4049                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4050                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4051                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4052                IEM_MC_ADVANCE_RIP();
4053                IEM_MC_END();
4054                break;
4055
4056            case IEMMODE_64BIT:
4057                IEM_MC_BEGIN(0, 2);
4058                IEM_MC_LOCAL(uint64_t, u64Value);
4059                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4060                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4061                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4063                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4064                IEM_MC_ADVANCE_RIP();
4065                IEM_MC_END();
4066                break;
4067        }
4068    }
4069    return VINF_SUCCESS;
4070}
4071
4072
4073/**
 4074 * opcode 0x63
 4075 * @todo Table fixme
 *
 * Mode-dependent opcode:
 *  - outside 64-bit mode: ARPL Ew,Gw;
 *  - in 64-bit mode with 64-bit operand size: MOVSXD Gv,Ev;
 *  - in 64-bit mode with 16/32-bit operand size (not recommended forms):
 *    handled as a plain MOV Gv,Ev.
 4076 */
4077FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4078{
4079    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4080        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4081    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4082        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4083    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4084}
4085
4086
4087/**
 4088 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register selector.  The register form honours
 * the operand size (zero-extending for 32/64-bit); the memory form always
 * stores a word.
 4089 */
4090FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4091{
4092    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4093
4094    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4095
4096    /*
4097     * Check that the destination register exists. The REX.R prefix is ignored.
4098     */
    /* /reg values beyond GS do not name a segment register -> #UD. */
4099    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4100    if (   iSegReg > X86_SREG_GS)
4101        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4102
4103    /*
4104     * If rm is denoting a register, no more instruction bytes.
4105     * In that case, the operand size is respected and the upper bits are
4106     * cleared (starting with some pentium).
4107     */
4108    if (IEM_IS_MODRM_REG_MODE(bRm))
4109    {
4110        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4111        switch (pVCpu->iem.s.enmEffOpSize)
4112        {
4113            case IEMMODE_16BIT:
4114                IEM_MC_BEGIN(0, 1);
4115                IEM_MC_LOCAL(uint16_t, u16Value);
4116                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4117                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4118                IEM_MC_ADVANCE_RIP();
4119                IEM_MC_END();
4120                break;
4121
4122            case IEMMODE_32BIT:
                /* Selector is zero-extended to the wider operand. */
4123                IEM_MC_BEGIN(0, 1);
4124                IEM_MC_LOCAL(uint32_t, u32Value);
4125                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4126                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4127                IEM_MC_ADVANCE_RIP();
4128                IEM_MC_END();
4129                break;
4130
4131            case IEMMODE_64BIT:
4132                IEM_MC_BEGIN(0, 1);
4133                IEM_MC_LOCAL(uint64_t, u64Value);
4134                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4135                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4136                IEM_MC_ADVANCE_RIP();
4137                IEM_MC_END();
4138                break;
4139        }
4140    }
4141    else
4142    {
4143        /*
4144         * We're saving the register to memory.  The access is word sized
4145         * regardless of operand size prefixes.
4146         */
4147#if 0 /* not necessary */
4148        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4149#endif
4150        IEM_MC_BEGIN(0, 2);
4151        IEM_MC_LOCAL(uint16_t, u16Value);
4152        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4153        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4154        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4155        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4156        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4157        IEM_MC_ADVANCE_RIP();
4158        IEM_MC_END();
4159    }
4160    return VINF_SUCCESS;
4161}
4162
4163
4164
4165
4166/**
 4167 * @opcode 0x8d
 *
 * LEA Gv,M - load the effective address into a register.  Register-direct
 * ModR/M forms are invalid (#UD).  For 16/32-bit operand sizes the computed
 * address is truncated to the operand width.
 4168 */
4169FNIEMOP_DEF(iemOp_lea_Gv_M)
4170{
4171    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4172    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4173    if (IEM_IS_MODRM_REG_MODE(bRm))
4174        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4175
4176    switch (pVCpu->iem.s.enmEffOpSize)
4177    {
4178        case IEMMODE_16BIT:
4179            IEM_MC_BEGIN(0, 2);
4180            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4181            IEM_MC_LOCAL(uint16_t, u16Cast);
4182            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4183            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 16 bits before storing. */
4184            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4185            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
4186            IEM_MC_ADVANCE_RIP();
4187            IEM_MC_END();
4188            return VINF_SUCCESS;
4189
4190        case IEMMODE_32BIT:
4191            IEM_MC_BEGIN(0, 2);
4192            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4193            IEM_MC_LOCAL(uint32_t, u32Cast);
4194            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4195            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 32 bits before storing. */
4196            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4197            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
4198            IEM_MC_ADVANCE_RIP();
4199            IEM_MC_END();
4200            return VINF_SUCCESS;
4201
4202        case IEMMODE_64BIT:
4203            IEM_MC_BEGIN(0, 1);
4204            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4205            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4206            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
4208            IEM_MC_ADVANCE_RIP();
4209            IEM_MC_END();
4210            return VINF_SUCCESS;
4211    }
    /* All operand sizes handled above; reaching this is an internal error. */
4212    AssertFailedReturn(VERR_IEM_IPE_7);
4213}
4214
4215
4216/**
 4217 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a word.  CS cannot be loaded
 * this way (#UD).  The actual load goes through the C implementation
 * (iemCImpl_load_SReg), which performs the descriptor handling.
 4218 */
4219FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4220{
4221    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4222
4223    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4224
4225    /*
4226     * The practical operand size is 16-bit.
4227     */
4228#if 0 /* not necessary */
4229    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4230#endif
4231
4232    /*
4233     * Check that the destination register exists and can be used with this
4234     * instruction.  The REX.R prefix is ignored.
4235     */
4236    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4237    if (   iSegReg == X86_SREG_CS
4238        || iSegReg > X86_SREG_GS)
4239        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4240
4241    /*
4242     * If rm is denoting a register, no more instruction bytes.
4243     */
4244    if (IEM_IS_MODRM_REG_MODE(bRm))
4245    {
4246        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4247        IEM_MC_BEGIN(2, 0);
4248        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4249        IEM_MC_ARG(uint16_t, u16Value, 1);
4250        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The C implementation advances RIP itself; no IEM_MC_ADVANCE_RIP here. */
4251        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4252        IEM_MC_END();
4253    }
4254    else
4255    {
4256        /*
4257         * We're loading the register from memory.  The access is word sized
4258         * regardless of operand size prefixes.
4259         */
4260        IEM_MC_BEGIN(2, 1);
4261        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4262        IEM_MC_ARG(uint16_t, u16Value, 1);
4263        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4264        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4265        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4266        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4267        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4268        IEM_MC_END();
4269    }
4270    return VINF_SUCCESS;
4271}
4272
4273
/** Opcode 0x8f /0.  POP Ev - pop the top of the stack into a GPR or memory. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP; the last argument biases the
       address calculation by the operand size to account for the pop. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop via a temporary RSP
       so nothing is committed until the memory store below succeeds too. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit RSP and advance RIP only after both pop and store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4367
4368
4369/**
4370 * @opcode 0x8f
4371 */
4372FNIEMOP_DEF(iemOp_Grp1A__xop)
4373{
4374 /*
4375 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4376 * three byte VEX prefix, except that the mmmmm field cannot have the values
4377 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4378 */
4379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4380 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4381 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4382
4383 IEMOP_MNEMONIC(xop, "xop");
4384 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4385 {
4386 /** @todo Test when exctly the XOP conformance checks kick in during
4387 * instruction decoding and fetching (using \#PF). */
4388 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4389 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4390 if ( ( pVCpu->iem.s.fPrefixes
4391 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4392 == 0)
4393 {
4394 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4395 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4396 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4397 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4398 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4399 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4400 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4401 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4402 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4403
4404 /** @todo XOP: Just use new tables and decoders. */
4405 switch (bRm & 0x1f)
4406 {
4407 case 8: /* xop opcode map 8. */
4408 IEMOP_BITCH_ABOUT_STUB();
4409 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4410
4411 case 9: /* xop opcode map 9. */
4412 IEMOP_BITCH_ABOUT_STUB();
4413 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4414
4415 case 10: /* xop opcode map 10. */
4416 IEMOP_BITCH_ABOUT_STUB();
4417 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4418
4419 default:
4420 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4421 return IEMOP_RAISE_INVALID_OPCODE();
4422 }
4423 }
4424 else
4425 Log(("XOP: Invalid prefix mix!\n"));
4426 }
4427 else
4428 Log(("XOP: XOP support disabled!\n"));
4429 return IEMOP_RAISE_INVALID_OPCODE();
4430}
4431
4432
4433/**
4434 * Common 'xchg reg,rAX' helper.
4435 */
4436FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4437{
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4439
4440 iReg |= pVCpu->iem.s.uRexB;
4441 switch (pVCpu->iem.s.enmEffOpSize)
4442 {
4443 case IEMMODE_16BIT:
4444 IEM_MC_BEGIN(0, 2);
4445 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4446 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4447 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4448 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4449 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4450 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 return VINF_SUCCESS;
4454
4455 case IEMMODE_32BIT:
4456 IEM_MC_BEGIN(0, 2);
4457 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4458 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4459 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4460 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4461 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4462 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4463 IEM_MC_ADVANCE_RIP();
4464 IEM_MC_END();
4465 return VINF_SUCCESS;
4466
4467 case IEMMODE_64BIT:
4468 IEM_MC_BEGIN(0, 2);
4469 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4470 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4471 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4472 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4473 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4474 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4475 IEM_MC_ADVANCE_RIP();
4476 IEM_MC_END();
4477 return VINF_SUCCESS;
4478
4479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4480 }
4481}
4482
4483
4484/**
4485 * @opcode 0x90
4486 */
4487FNIEMOP_DEF(iemOp_nop)
4488{
4489 /* R8/R8D and RAX/EAX can be exchanged. */
4490 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4491 {
4492 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4493 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4494 }
4495
4496 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4497 {
4498 IEMOP_MNEMONIC(pause, "pause");
4499#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4500 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4501 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4502#endif
4503#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4504 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4505 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4506#endif
4507 }
4508 else
4509 IEMOP_MNEMONIC(nop, "nop");
4510 IEM_MC_BEGIN(0, 0);
4511 IEM_MC_ADVANCE_RIP();
4512 IEM_MC_END();
4513 return VINF_SUCCESS;
4514}
4515
4516
4517/**
4518 * @opcode 0x91
4519 */
4520FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4521{
4522 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4523 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4524}
4525
4526
4527/**
4528 * @opcode 0x92
4529 */
4530FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4531{
4532 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4533 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4534}
4535
4536
4537/**
4538 * @opcode 0x93
4539 */
4540FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4541{
4542 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4543 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4544}
4545
4546
4547/**
4548 * @opcode 0x94
4549 */
4550FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4551{
4552 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4553 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4554}
4555
4556
4557/**
4558 * @opcode 0x95
4559 */
4560FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4561{
4562 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4563 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4564}
4565
4566
4567/**
4568 * @opcode 0x96
4569 */
4570FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4571{
4572 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4573 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4574}
4575
4576
4577/**
4578 * @opcode 0x97
4579 */
4580FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4581{
4582 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4583 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4584}
4585
4586
4587/**
4588 * @opcode 0x98
4589 */
4590FNIEMOP_DEF(iemOp_cbw)
4591{
4592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4593 switch (pVCpu->iem.s.enmEffOpSize)
4594 {
4595 case IEMMODE_16BIT:
4596 IEMOP_MNEMONIC(cbw, "cbw");
4597 IEM_MC_BEGIN(0, 1);
4598 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4599 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4600 } IEM_MC_ELSE() {
4601 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4602 } IEM_MC_ENDIF();
4603 IEM_MC_ADVANCE_RIP();
4604 IEM_MC_END();
4605 return VINF_SUCCESS;
4606
4607 case IEMMODE_32BIT:
4608 IEMOP_MNEMONIC(cwde, "cwde");
4609 IEM_MC_BEGIN(0, 1);
4610 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4611 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4612 } IEM_MC_ELSE() {
4613 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4614 } IEM_MC_ENDIF();
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 return VINF_SUCCESS;
4618
4619 case IEMMODE_64BIT:
4620 IEMOP_MNEMONIC(cdqe, "cdqe");
4621 IEM_MC_BEGIN(0, 1);
4622 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4623 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4624 } IEM_MC_ELSE() {
4625 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4626 } IEM_MC_ENDIF();
4627 IEM_MC_ADVANCE_RIP();
4628 IEM_MC_END();
4629 return VINF_SUCCESS;
4630
4631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4632 }
4633}
4634
4635
4636/**
4637 * @opcode 0x99
4638 */
4639FNIEMOP_DEF(iemOp_cwd)
4640{
4641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4642 switch (pVCpu->iem.s.enmEffOpSize)
4643 {
4644 case IEMMODE_16BIT:
4645 IEMOP_MNEMONIC(cwd, "cwd");
4646 IEM_MC_BEGIN(0, 1);
4647 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4648 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4649 } IEM_MC_ELSE() {
4650 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4651 } IEM_MC_ENDIF();
4652 IEM_MC_ADVANCE_RIP();
4653 IEM_MC_END();
4654 return VINF_SUCCESS;
4655
4656 case IEMMODE_32BIT:
4657 IEMOP_MNEMONIC(cdq, "cdq");
4658 IEM_MC_BEGIN(0, 1);
4659 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4660 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4661 } IEM_MC_ELSE() {
4662 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4663 } IEM_MC_ENDIF();
4664 IEM_MC_ADVANCE_RIP();
4665 IEM_MC_END();
4666 return VINF_SUCCESS;
4667
4668 case IEMMODE_64BIT:
4669 IEMOP_MNEMONIC(cqo, "cqo");
4670 IEM_MC_BEGIN(0, 1);
4671 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4672 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4673 } IEM_MC_ELSE() {
4674 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4675 } IEM_MC_ENDIF();
4676 IEM_MC_ADVANCE_RIP();
4677 IEM_MC_END();
4678 return VINF_SUCCESS;
4679
4680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4681 }
4682}
4683
4684
4685/**
4686 * @opcode 0x9a
4687 */
4688FNIEMOP_DEF(iemOp_call_Ap)
4689{
4690 IEMOP_MNEMONIC(call_Ap, "call Ap");
4691 IEMOP_HLP_NO_64BIT();
4692
4693 /* Decode the far pointer address and pass it on to the far call C implementation. */
4694 uint32_t offSeg;
4695 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4696 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4697 else
4698 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4699 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4701 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4702}
4703
4704
/** Opcode 0x9b.  (aka fwait)
 * Checks for pending x87 exceptions (and CR0.TS/MP induced \#NM) without
 * executing any FPU operation. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4718
4719
4720/**
4721 * @opcode 0x9c
4722 */
4723FNIEMOP_DEF(iemOp_pushf_Fv)
4724{
4725 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
4726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4727 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4728 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4729}
4730
4731
4732/**
4733 * @opcode 0x9d
4734 */
4735FNIEMOP_DEF(iemOp_popf_Fv)
4736{
4737 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
4738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4739 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4740 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4741}
4742
4743
4744/**
4745 * @opcode 0x9e
4746 */
4747FNIEMOP_DEF(iemOp_sahf)
4748{
4749 IEMOP_MNEMONIC(sahf, "sahf");
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4751 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4752 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4753 return IEMOP_RAISE_INVALID_OPCODE();
4754 IEM_MC_BEGIN(0, 2);
4755 IEM_MC_LOCAL(uint32_t, u32Flags);
4756 IEM_MC_LOCAL(uint32_t, EFlags);
4757 IEM_MC_FETCH_EFLAGS(EFlags);
4758 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4759 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4760 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4761 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4762 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4763 IEM_MC_COMMIT_EFLAGS(EFlags);
4764 IEM_MC_ADVANCE_RIP();
4765 IEM_MC_END();
4766 return VINF_SUCCESS;
4767}
4768
4769
4770/**
4771 * @opcode 0x9f
4772 */
4773FNIEMOP_DEF(iemOp_lahf)
4774{
4775 IEMOP_MNEMONIC(lahf, "lahf");
4776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4777 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4778 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4779 return IEMOP_RAISE_INVALID_OPCODE();
4780 IEM_MC_BEGIN(0, 1);
4781 IEM_MC_LOCAL(uint8_t, u8Flags);
4782 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4783 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4784 IEM_MC_ADVANCE_RIP();
4785 IEM_MC_END();
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/**
4791 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4792 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
4793 * prefixes. Will return on failures.
4794 * @param a_GCPtrMemOff The variable to store the offset in.
4795 */
4796#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4797 do \
4798 { \
4799 switch (pVCpu->iem.s.enmEffAddrMode) \
4800 { \
4801 case IEMMODE_16BIT: \
4802 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4803 break; \
4804 case IEMMODE_32BIT: \
4805 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4806 break; \
4807 case IEMMODE_64BIT: \
4808 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4809 break; \
4810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4811 } \
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4813 } while (0)
4814
4815/**
4816 * @opcode 0xa0
4817 */
4818FNIEMOP_DEF(iemOp_mov_AL_Ob)
4819{
4820 /*
4821 * Get the offset and fend off lock prefixes.
4822 */
4823 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4824 RTGCPTR GCPtrMemOff;
4825 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4826
4827 /*
4828 * Fetch AL.
4829 */
4830 IEM_MC_BEGIN(0,1);
4831 IEM_MC_LOCAL(uint8_t, u8Tmp);
4832 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4833 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4834 IEM_MC_ADVANCE_RIP();
4835 IEM_MC_END();
4836 return VINF_SUCCESS;
4837}
4838
4839
4840/**
4841 * @opcode 0xa1
4842 */
4843FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4844{
4845 /*
4846 * Get the offset and fend off lock prefixes.
4847 */
4848 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4849 RTGCPTR GCPtrMemOff;
4850 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4851
4852 /*
4853 * Fetch rAX.
4854 */
4855 switch (pVCpu->iem.s.enmEffOpSize)
4856 {
4857 case IEMMODE_16BIT:
4858 IEM_MC_BEGIN(0,1);
4859 IEM_MC_LOCAL(uint16_t, u16Tmp);
4860 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4861 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4862 IEM_MC_ADVANCE_RIP();
4863 IEM_MC_END();
4864 return VINF_SUCCESS;
4865
4866 case IEMMODE_32BIT:
4867 IEM_MC_BEGIN(0,1);
4868 IEM_MC_LOCAL(uint32_t, u32Tmp);
4869 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4870 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4871 IEM_MC_ADVANCE_RIP();
4872 IEM_MC_END();
4873 return VINF_SUCCESS;
4874
4875 case IEMMODE_64BIT:
4876 IEM_MC_BEGIN(0,1);
4877 IEM_MC_LOCAL(uint64_t, u64Tmp);
4878 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4879 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4885 }
4886}
4887
4888
4889/**
4890 * @opcode 0xa2
4891 */
4892FNIEMOP_DEF(iemOp_mov_Ob_AL)
4893{
4894 /*
4895 * Get the offset and fend off lock prefixes.
4896 */
4897 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4898 RTGCPTR GCPtrMemOff;
4899 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4900
4901 /*
4902 * Store AL.
4903 */
4904 IEM_MC_BEGIN(0,1);
4905 IEM_MC_LOCAL(uint8_t, u8Tmp);
4906 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4907 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4908 IEM_MC_ADVANCE_RIP();
4909 IEM_MC_END();
4910 return VINF_SUCCESS;
4911}
4912
4913
4914/**
4915 * @opcode 0xa3
4916 */
4917FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4918{
4919 /*
4920 * Get the offset and fend off lock prefixes.
4921 */
4922 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
4923 RTGCPTR GCPtrMemOff;
4924 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4925
4926 /*
4927 * Store rAX.
4928 */
4929 switch (pVCpu->iem.s.enmEffOpSize)
4930 {
4931 case IEMMODE_16BIT:
4932 IEM_MC_BEGIN(0,1);
4933 IEM_MC_LOCAL(uint16_t, u16Tmp);
4934 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4935 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4936 IEM_MC_ADVANCE_RIP();
4937 IEM_MC_END();
4938 return VINF_SUCCESS;
4939
4940 case IEMMODE_32BIT:
4941 IEM_MC_BEGIN(0,1);
4942 IEM_MC_LOCAL(uint32_t, u32Tmp);
4943 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4944 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_64BIT:
4950 IEM_MC_BEGIN(0,1);
4951 IEM_MC_LOCAL(uint64_t, u64Tmp);
4952 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4953 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4954 IEM_MC_ADVANCE_RIP();
4955 IEM_MC_END();
4956 return VINF_SUCCESS;
4957
4958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4959 }
4960}
4961
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Implements a single (non-rep) MOVS iteration: load from [seg:xSI], store
 * to [ES:xDI], then advance or rewind both index registers by the element
 * size according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4980
4981/**
4982 * @opcode 0xa4
4983 */
4984FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4985{
4986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4987
4988 /*
4989 * Use the C implementation if a repeat prefix is encountered.
4990 */
4991 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4992 {
4993 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4994 switch (pVCpu->iem.s.enmEffAddrMode)
4995 {
4996 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4997 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4998 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5000 }
5001 }
5002 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5003
5004 /*
5005 * Sharing case implementation with movs[wdq] below.
5006 */
5007 switch (pVCpu->iem.s.enmEffAddrMode)
5008 {
5009 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5010 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5011 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5013 }
5014 return VINF_SUCCESS;
5015}
5016
5017
5018/**
5019 * @opcode 0xa5
5020 */
5021FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5022{
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5024
5025 /*
5026 * Use the C implementation if a repeat prefix is encountered.
5027 */
5028 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5029 {
5030 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5031 switch (pVCpu->iem.s.enmEffOpSize)
5032 {
5033 case IEMMODE_16BIT:
5034 switch (pVCpu->iem.s.enmEffAddrMode)
5035 {
5036 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5037 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5038 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5040 }
5041 break;
5042 case IEMMODE_32BIT:
5043 switch (pVCpu->iem.s.enmEffAddrMode)
5044 {
5045 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5046 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5047 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5049 }
5050 case IEMMODE_64BIT:
5051 switch (pVCpu->iem.s.enmEffAddrMode)
5052 {
5053 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5054 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5055 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5057 }
5058 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5059 }
5060 }
5061 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5062
5063 /*
5064 * Annoying double switch here.
5065 * Using ugly macro for implementing the cases, sharing it with movsb.
5066 */
5067 switch (pVCpu->iem.s.enmEffOpSize)
5068 {
5069 case IEMMODE_16BIT:
5070 switch (pVCpu->iem.s.enmEffAddrMode)
5071 {
5072 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5073 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5074 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5076 }
5077 break;
5078
5079 case IEMMODE_32BIT:
5080 switch (pVCpu->iem.s.enmEffAddrMode)
5081 {
5082 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5083 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5084 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5086 }
5087 break;
5088
5089 case IEMMODE_64BIT:
5090 switch (pVCpu->iem.s.enmEffAddrMode)
5091 {
5092 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5093 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5094 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5096 }
5097 break;
5098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5099 }
5100 return VINF_SUCCESS;
5101}
5102
5103#undef IEM_MOVS_CASE
5104
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Implements a single (non-rep) CMPS iteration: compare [seg:xSI] against
 * [ES:xDI] via the cmp arithmetic worker (only EFLAGS are affected), then
 * advance or rewind both index registers according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5132/**
5133 * @opcode 0xa6
5134 */
5135FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5136{
5137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5138
5139 /*
5140 * Use the C implementation if a repeat prefix is encountered.
5141 */
5142 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5143 {
5144 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5145 switch (pVCpu->iem.s.enmEffAddrMode)
5146 {
5147 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5148 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5149 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5151 }
5152 }
5153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5154 {
5155 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5156 switch (pVCpu->iem.s.enmEffAddrMode)
5157 {
5158 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5159 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5160 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5162 }
5163 }
5164 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5165
5166 /*
5167 * Sharing case implementation with cmps[wdq] below.
5168 */
5169 switch (pVCpu->iem.s.enmEffAddrMode)
5170 {
5171 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5172 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5173 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5175 }
5176 return VINF_SUCCESS;
5177
5178}
5179
5180
5181/**
5182 * @opcode 0xa7
5183 */
5184FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5185{
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 /*
5189 * Use the C implementation if a repeat prefix is encountered.
5190 */
5191 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5192 {
5193 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5194 switch (pVCpu->iem.s.enmEffOpSize)
5195 {
5196 case IEMMODE_16BIT:
5197 switch (pVCpu->iem.s.enmEffAddrMode)
5198 {
5199 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5200 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5201 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5203 }
5204 break;
5205 case IEMMODE_32BIT:
5206 switch (pVCpu->iem.s.enmEffAddrMode)
5207 {
5208 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5209 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5210 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5212 }
5213 case IEMMODE_64BIT:
5214 switch (pVCpu->iem.s.enmEffAddrMode)
5215 {
5216 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5217 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5218 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5220 }
5221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5222 }
5223 }
5224
5225 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5226 {
5227 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5228 switch (pVCpu->iem.s.enmEffOpSize)
5229 {
5230 case IEMMODE_16BIT:
5231 switch (pVCpu->iem.s.enmEffAddrMode)
5232 {
5233 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5234 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5235 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5237 }
5238 break;
5239 case IEMMODE_32BIT:
5240 switch (pVCpu->iem.s.enmEffAddrMode)
5241 {
5242 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5243 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5244 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5246 }
5247 case IEMMODE_64BIT:
5248 switch (pVCpu->iem.s.enmEffAddrMode)
5249 {
5250 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5251 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5252 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5254 }
5255 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5256 }
5257 }
5258
5259 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5260
5261 /*
5262 * Annoying double switch here.
5263 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5264 */
5265 switch (pVCpu->iem.s.enmEffOpSize)
5266 {
5267 case IEMMODE_16BIT:
5268 switch (pVCpu->iem.s.enmEffAddrMode)
5269 {
5270 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5271 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5272 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5274 }
5275 break;
5276
5277 case IEMMODE_32BIT:
5278 switch (pVCpu->iem.s.enmEffAddrMode)
5279 {
5280 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5281 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5282 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5284 }
5285 break;
5286
5287 case IEMMODE_64BIT:
5288 switch (pVCpu->iem.s.enmEffAddrMode)
5289 {
5290 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5291 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5292 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5294 }
5295 break;
5296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5297 }
5298 return VINF_SUCCESS;
5299
5300}
5301
5302#undef IEM_CMPS_CASE
5303
/**
 * @opcode 0xa8
 *
 * 'test al,Ib' - AND AL with the imm8, updating flags only (result discarded).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5313
5314
/**
 * @opcode 0xa9
 *
 * 'test rAX,Iz' - AND rAX (operand-size) with the immediate, updating flags only.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5324
5325
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one STOS variant: store the low ValBits of xAX at
 * ES:xDI (address taken AddrBits wide, zero extended to 64-bit), then step
 * xDI down or up by ValBits/8 according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5341
/**
 * @opcode 0xaa
 *
 * 'stos Yb,al' - store AL at ES:xDI and advance xDI; REP-prefixed forms are
 * deferred to C implementations.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave identically on STOS, so both are accepted here.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5377
5378
/**
 * @opcode 0xab
 *
 * 'stos Yv,rAX' - store ax/eax/rax (per operand size) at ES:xDI and advance
 * xDI; REP-prefixed forms are deferred to C implementations.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: every case above returns, the fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* a16 cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5463
5464#undef IEM_STOS_CASE
5465
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one LODS variant: load ValBits from DS(eff seg):xSI
 * into the low ValBits of xAX, then step xSI down or up by ValBits/8
 * according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5481
/**
 * @opcode 0xac
 *
 * 'lodsb AL,Xb' - load the byte at the effective segment:xSI into AL and
 * advance xSI; REP-prefixed forms are deferred to C implementations.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave identically on LODS, so both are accepted here.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5517
5518
/**
 * @opcode 0xad
 *
 * 'lods rAX,Xv' - load ax/eax/rax (per operand size) from the effective
 * segment:xSI and advance xSI; REP-prefixed forms are deferred to C
 * implementations.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: every case above returns, the fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* a16 cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5603
5604#undef IEM_LODS_CASE
5605
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one SCAS variant: compare the low ValBits of xAX
 * with ValBits fetched from ES:xDI (via the shared cmp assembly helper, which
 * updates EFLAGS), then step xDI down or up by ValBits/8 according to
 * EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5627
5628/**
5629 * @opcode 0xae
5630 */
5631FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5632{
5633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5634
5635 /*
5636 * Use the C implementation if a repeat prefix is encountered.
5637 */
5638 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5639 {
5640 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5641 switch (pVCpu->iem.s.enmEffAddrMode)
5642 {
5643 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5644 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5645 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5647 }
5648 }
5649 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5650 {
5651 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5652 switch (pVCpu->iem.s.enmEffAddrMode)
5653 {
5654 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5655 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5656 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5658 }
5659 }
5660 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5661
5662 /*
5663 * Sharing case implementation with stos[wdq] below.
5664 */
5665 switch (pVCpu->iem.s.enmEffAddrMode)
5666 {
5667 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5668 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5669 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5671 }
5672 return VINF_SUCCESS;
5673}
5674
5675
/**
 * @opcode 0xaf
 *
 * 'scas rAX,Xv' - compare ax/eax/rax (per operand size) with the value at
 * ES:xDI and advance xDI; REPE/REPNE prefixed forms are deferred to C
 * implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: every case above returns, the fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 16-bit addressing should not be encodable in 64-bit mode (unlike 32-bit) -- verify. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: every case above returns, the fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* a16 cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5793
5794#undef IEM_SCAS_CASE
5795
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iReg    The register index, including any REX.B extension
 *                  (supplied by the per-opcode wrappers below).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
5812
5813
/**
 * @opcode 0xb0
 *
 * 'mov AL,Ib' (or R8B with REX.B).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5822
5823
/**
 * @opcode 0xb1
 *
 * 'mov CL,Ib' (or R9B with REX.B).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5832
5833
/**
 * @opcode 0xb2
 *
 * 'mov DL,Ib' (or R10B with REX.B).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5842
5843
/**
 * @opcode 0xb3
 *
 * 'mov BL,Ib' (or R11B with REX.B).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOp_r8_Ib_placeholder_never, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5852
5853
/**
 * @opcode 0xb4
 *
 * 'mov AH,Ib'.  Register index 4 (X86_GREG_xSP) selects AH here; with a REX
 * prefix it presumably selects SPL instead -- resolved by the 8-bit GREG
 * store helper (NOTE(review): confirm against IEM_MC_STORE_GREG_U8).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5862
5863
/**
 * @opcode 0xb5
 *
 * 'mov CH,Ib'.  Register index 5 (X86_GREG_xBP) selects CH here; with a REX
 * prefix it presumably selects BPL -- see note on iemOpCommonMov_r8_Ib usage.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5872
5873
/**
 * @opcode 0xb6
 *
 * 'mov DH,Ib'.  Register index 6 (X86_GREG_xSI) selects DH here; with a REX
 * prefix it presumably selects SIL -- see note on iemOpCommonMov_r8_Ib usage.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5882
5883
/**
 * @opcode 0xb7
 *
 * 'mov BH,Ib'.  Register index 7 (X86_GREG_xDI) selects BH here; with a REX
 * prefix it presumably selects DIL -- see note on iemOpCommonMov_r8_Ib usage.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5892
5893
5894/**
5895 * Common 'mov regX,immX' helper.
5896 */
5897FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5898{
5899 switch (pVCpu->iem.s.enmEffOpSize)
5900 {
5901 case IEMMODE_16BIT:
5902 {
5903 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5905
5906 IEM_MC_BEGIN(0, 1);
5907 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5908 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5909 IEM_MC_ADVANCE_RIP();
5910 IEM_MC_END();
5911 break;
5912 }
5913
5914 case IEMMODE_32BIT:
5915 {
5916 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5918
5919 IEM_MC_BEGIN(0, 1);
5920 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5921 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5922 IEM_MC_ADVANCE_RIP();
5923 IEM_MC_END();
5924 break;
5925 }
5926 case IEMMODE_64BIT:
5927 {
5928 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930
5931 IEM_MC_BEGIN(0, 1);
5932 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5933 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5934 IEM_MC_ADVANCE_RIP();
5935 IEM_MC_END();
5936 break;
5937 }
5938 }
5939
5940 return VINF_SUCCESS;
5941}
5942
5943
/**
 * @opcode 0xb8
 *
 * 'mov rAX,Iv' (or R8 with REX.B).
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5952
5953
/**
 * @opcode 0xb9
 *
 * 'mov rCX,Iv' (or R9 with REX.B).
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5962
5963
/**
 * @opcode 0xba
 *
 * 'mov rDX,Iv' (or R10 with REX.B).
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5972
5973
/**
 * @opcode 0xbb
 *
 * 'mov rBX,Iv' (or R11 with REX.B).
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5982
5983
/**
 * @opcode 0xbc
 *
 * 'mov rSP,Iv' (or R12 with REX.B).
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5992
5993
/**
 * @opcode 0xbd
 *
 * 'mov rBP,Iv' (or R13 with REX.B).
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6002
6003
/**
 * @opcode 0xbe
 *
 * 'mov rSI,Iv' (or R14 with REX.B).
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6012
6013
/**
 * @opcode 0xbf
 *
 * 'mov rDI,Iv' (or R15 with REX.B).
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6022
6023
/**
 * @opcode 0xc0
 *
 * Group 2 byte shifts/rotates with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,Ib.  /6 is an invalid encoding.
 * Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects the operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: calc the address first (imm8 displacement follows ModR/M),
           then fetch the shift count immediate. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6085
6086
/**
 * @opcode 0xc1
 *
 * Group 2 word/dword/qword shifts/rotates with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.  /6 is an invalid encoding.
 * Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects the operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: calc the address first (imm8 displacement follows ModR/M),
           then fetch the shift count immediate. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6228
6229
/**
 * @opcode 0xc2
 *
 * 'retn Iw' - near return, popping Iw extra bytes off the stack.
 * The operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6241
6242
/**
 * @opcode 0xc3
 *
 * 'retn' - near return.  Shares the C implementation with retn Iw,
 * passing zero bytes to pop.  The operand size defaults to 64-bit in
 * long mode.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
6253
6254
/**
 * @opcode 0xc4
 *
 * Dual-purpose opcode: LES in contexts where that encoding is valid,
 * otherwise the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        /* VEX3 prefix path. */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);   /* 3rd VEX byte: W, vvvv, L, pp */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode); /* opcode byte following the prefix */
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W; /* VEX.W acts as REX.W in 64-bit mode */
            /* The R/X/B bits are stored inverted in the VEX bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;      /* vvvv, also inverted */
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;         /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;              /* pp: implied 66/F3/F2 prefix */

            /* The low five bits of the 2nd VEX byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LES path (memory operand, not 64-bit mode). */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6324
6325
/**
 * @opcode 0xc5
 *
 * Dual-purpose opcode: LDS in contexts where that encoding is valid,
 * otherwise the 2-byte VEX prefix (always selects opcode map 1, i.e. 0x0f).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* VEX2 prefix path. */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode); /* opcode byte following the prefix */
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R and vvvv are stored inverted in the VEX byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;  /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;       /* pp: implied 66/F3/F2 prefix */

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LDS path (memory operand, not 64-bit mode). */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6369
6370
/**
 * @opcode 0xc6
 *
 * Group 11 byte form; only /0 (mov Eb,Ib) is implemented here, the other
 * /reg encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Note: the effective address must be calculated before fetching the
           immediate, as the immediate follows the displacement bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6405
6406
/**
 * @opcode 0xc7
 *
 * Group 11 word/dword/qword form; only /0 (mov Ev,Iz) is implemented here,
 * the other /reg encodings raise \#UD.  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The second argument to IEM_MC_CALC_RM_EFF_ADDR is
           the number of immediate bytes still to be fetched after the
           ModRM/SIB/displacement, needed for RIP-relative addressing. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still a 4-byte immediate */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6494
6495
6496
6497
/**
 * @opcode 0xc8
 *
 * ENTER: set up a stack frame of cbFrame bytes with u8NestingLevel nesting
 * levels.  Requires a 186 or later; work deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6511
6512
/**
 * @opcode 0xc9
 *
 * LEAVE: tear down the stack frame created by ENTER.  Requires a 186 or
 * later; work deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6524
6525
/**
 * @opcode 0xca
 *
 * Far return with an immediate: pops CS:IP/EIP/RIP and then releases a
 * further Iw bytes of stack.  Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* byte count to pop after the far return address */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6537
6538
/**
 * @opcode 0xcb
 *
 * Plain far return; same C implementation as 0xca but with zero extra
 * bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6549
6550
6551/**
6552 * @opcode 0xcc
6553 */
6554FNIEMOP_DEF(iemOp_int3)
6555{
6556 IEMOP_MNEMONIC(int3, "int3");
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6559}
6560
6561
6562/**
6563 * @opcode 0xcd
6564 */
6565FNIEMOP_DEF(iemOp_int_Ib)
6566{
6567 IEMOP_MNEMONIC(int_Ib, "int Ib");
6568 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6570 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
6571}
6572
6573
/**
 * @opcode 0xce
 *
 * INTO: raise \#OF if EFLAGS.OF is set (the conditional test lives in
 * iemCImpl_int via the IEMINT_INTO flavor).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,  /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6589
6590
/**
 * @opcode 0xcf
 *
 * IRET/IRETD/IRETQ: interrupt return, deferred entirely to iemCImpl_iret
 * with the effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6600
6601
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by a constant count of 1.
 * The /reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read-write, call the worker, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6659
6660
6661
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by a constant count
 * of 1.  The /reg field selects the operation; /6 is undefined and raises
 * \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read-write, call the worker, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6795
6796
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand with the count taken from CL.
 * The /reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read-write, call the worker, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6856
6857
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand with the count taken
 * from CL.  The /reg field selects the operation; /6 is undefined and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read-write, call the worker, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6997
/**
 * @opcode 0xd4
 *
 * AAM: ASCII adjust AX after multiply; the immediate is the divisor
 * (0x0a in the canonical encoding).  A zero immediate raises \#DE here,
 * before deferring to the C implementation.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the divisor */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* division by zero */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
7011
7012
/**
 * @opcode 0xd5
 *
 * AAD: ASCII adjust AX before division; the immediate is the multiplier
 * (0x0a in the canonical encoding).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the multiplier */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7024
7025
/**
 * @opcode 0xd6
 *
 * SALC (undocumented): set AL to 0xff if CF is set, otherwise to 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7045
7046
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [rBX + zero-extended AL], using the effective segment (DS by
 * default, overridable).  One variant per effective address size; the 16
 * and 32-bit variants use fetch macros that wrap the address at the
 * respective width.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extend AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* zero-extend AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* zero-extend AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7095
7096
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate; if either register is empty the stack
 * underflow path is taken instead of calling the assembly worker.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7128
7129
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW); no register result is stored.
 *
 * Raises \#NM/\#MF as appropriate; if either register is empty the stack
 * underflow path is taken instead of calling the assembly worker.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word is updated */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7161
7162
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the register stack when done.
 *
 * Raises \#NM/\#MF as appropriate; if either register is empty the stack
 * underflow path is taken instead of calling the assembly worker.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* update status word and pop ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7194
7195
/** Opcode 0xd8 11/0. FADD ST0,STi - result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST0,STi - result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST0,STi - compare only, FSW flags updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST0,STi - compare, then pop (same worker as FCOM). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST0,STi - ST0 = ST0 - STi. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST0,STi - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST0,STi - ST0 = ST0 / STi. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST0,STi - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7258
7259
7260/**
7261 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7262 * the result in ST0.
7263 *
7264 * @param bRm Mod R/M byte.
7265 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7266 */
7267FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7268{
7269 IEM_MC_BEGIN(3, 3);
7270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7271 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7272 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7273 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7274 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7275 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7276
7277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7279
7280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7282 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7283
7284 IEM_MC_PREPARE_FPU_USAGE();
7285 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7286 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7287 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7288 IEM_MC_ELSE()
7289 IEM_MC_FPU_STACK_UNDERFLOW(0);
7290 IEM_MC_ENDIF();
7291 IEM_MC_ADVANCE_RIP();
7292
7293 IEM_MC_END();
7294 return VINF_SUCCESS;
7295}
7296
7297
/** Opcode 0xd8 !11/0. FADD ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7312
7313
/** Opcode 0xd8 !11/2. FCOM ST0,m32real - compare only, FSW flags updated. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so a #PF is raised before FPU changes. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _MEM_OP variants also record FPUDP/FPUDS for the data operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7346
7347
/** Opcode 0xd8 !11/3. FCOMP ST0,m32real - compare, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so a #PF is raised before FPU changes. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7380
7381
/** Opcode 0xd8 !11/4. FSUB ST0,m32real - ST0 = ST0 - m32r. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST0,m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST0,m32real - ST0 = ST0 / m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST0,m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7412
7413
7414/**
7415 * @opcode 0xd8
7416 */
7417FNIEMOP_DEF(iemOp_EscF0)
7418{
7419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7420 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7421
7422 if (IEM_IS_MODRM_REG_MODE(bRm))
7423 {
7424 switch (IEM_GET_MODRM_REG_8(bRm))
7425 {
7426 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7427 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7428 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7429 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7430 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7431 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7432 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7433 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7435 }
7436 }
7437 else
7438 {
7439 switch (IEM_GET_MODRM_REG_8(bRm))
7440 {
7441 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7442 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7443 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7444 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7445 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7446 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7447 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7448 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7450 }
7451 }
7452}
7453
7454
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert to 80-bit and push onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes the new ST0 after the push; it must
       be free or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7487
7488
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if invalid-op exceptions are masked (FCW.IM), store the
           indefinite QNaN; either way report stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7523
7524
/** Opcode 0xd9 !11/3
 * FSTP m32real - store ST0 to memory as 32-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked store the indefinite QNaN, then
           report stack underflow (popping regardless). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7559
7560
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; size depends on the
 * effective operand size, hence enmEffOpSize is passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7578
7579
7580/** Opcode 0xd9 !11/5 */
7581FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7582{
7583 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7584 IEM_MC_BEGIN(1, 1);
7585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7586 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7589 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7590 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7591 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7592 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7593 IEM_MC_END();
7594 return VINF_SUCCESS;
7595}
7596
7597
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - store the FPU environment without checking for
 * pending exceptions first.
 * NOTE(review): the mnemonic/stat key below says "fstenv", but this decoder
 * entry is the no-wait form (FSTENV is FNSTENV preceded by FWAIT) - the
 * label looks inaccurate; confirm before renaming the stats key. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7615
7616
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the current FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7634
7635
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - no operation, but still subject to #NM/#MF checks and FPU
 * opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7653
7654
/** Opcode 0xd9 11/0 stN
 * FLD STi - push a copy of STi onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        /* Source register valid: wrap it in a result (FSW=0) and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7682
7683
/** Opcode 0xd9 11/3 stN
 * FXCH STi - exchange the contents of ST0 and STi. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        /* Swap: STi's old value (with C1 set) goes to ST0, ST0's to STi. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One of the registers is empty: let the C worker handle underflow. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7714
7715
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STi - copy ST0 to STi and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (if valid). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0's value into STi, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7762
7763
7764/**
7765 * Common worker for FPU instructions working on ST0 and replaces it with the
7766 * result, i.e. unary operators.
7767 *
7768 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7769 */
7770FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7771{
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7773
7774 IEM_MC_BEGIN(2, 1);
7775 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7776 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7777 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7778
7779 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7780 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7781 IEM_MC_PREPARE_FPU_USAGE();
7782 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7783 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7784 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7785 IEM_MC_ELSE()
7786 IEM_MC_FPU_STACK_UNDERFLOW(0);
7787 IEM_MC_ENDIF();
7788 IEM_MC_ADVANCE_RIP();
7789
7790 IEM_MC_END();
7791 return VINF_SUCCESS;
7792}
7793
7794
/** Opcode 0xd9 0xe0. FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7809
7810
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against +0.0; result goes into the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* Flags-only op: UINT8_MAX = no result register for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7836
7837
/** Opcode 0xd9 0xe5.
 * FXAM - classify the value in ST0 into the FSW condition codes. Unlike the
 * other ST0 workers, this does NOT check for an empty register: FXAM must
 * classify empty registers too, so the reference is taken unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7860
7861
7862/**
7863 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7864 *
7865 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7866 */
7867FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7868{
7869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7870
7871 IEM_MC_BEGIN(1, 1);
7872 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7873 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7874
7875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7876 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7877 IEM_MC_PREPARE_FPU_USAGE();
7878 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7879 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7880 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7881 IEM_MC_ELSE()
7882 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7883 IEM_MC_ENDIF();
7884 IEM_MC_ADVANCE_RIP();
7885
7886 IEM_MC_END();
7887 return VINF_SUCCESS;
7888}
7889
7890
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7943
7944
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7958
7959
7960/**
7961 * Common worker for FPU instructions working on STn and ST0, storing the result
7962 * in STn, and popping the stack unless IE, DE or ZE was raised.
7963 *
7964 * @param bRm Mod R/M byte.
7965 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7966 */
7967FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7968{
7969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7970
7971 IEM_MC_BEGIN(3, 1);
7972 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7973 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7974 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7975 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7976
7977 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7978 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7979
7980 IEM_MC_PREPARE_FPU_USAGE();
7981 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
7982 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7983 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
7984 IEM_MC_ELSE()
7985 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
7986 IEM_MC_ENDIF();
7987 IEM_MC_ADVANCE_RIP();
7988
7989 IEM_MC_END();
7990 return VINF_SUCCESS;
7991}
7992
7993
/** Opcode 0xd9 0xf1.
 * FYL2X - ST1 = ST1 * log2(ST0), then pop (1 = ST1 as destination). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8000
8001
8002/**
8003 * Common worker for FPU instructions working on ST0 and having two outputs, one
8004 * replacing ST0 and one pushed onto the stack.
8005 *
8006 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8007 */
8008FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8009{
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011
8012 IEM_MC_BEGIN(2, 1);
8013 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8014 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8015 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8016
8017 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8018 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8019 IEM_MC_PREPARE_FPU_USAGE();
8020 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8021 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8022 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8023 IEM_MC_ELSE()
8024 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8025 IEM_MC_ENDIF();
8026 IEM_MC_ADVANCE_RIP();
8027
8028 IEM_MC_END();
8029 return VINF_SUCCESS;
8030}
8031
8032
/** Opcode 0xd9 0xf2. FPTAN - replace ST0 with tan result and push. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - split ST0 into exponent (ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0 by ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8063
8064
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack top pointer (TOP) without moving data. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8087
8088
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack top pointer (TOP) without moving data. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8111
8112
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST0 by ST1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - ST1 = ST1 * log2(ST0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - square root of ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - ST0 = sin, cos pushed on top. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - round ST0 to integer per FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scale ST0 by a power of two taken from ST1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - sine of ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - cosine of ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8175
8176
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register forms 0xe0-0xff; index = bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8213
8214
/**
 * @opcode 0xd9
 *
 * Escape opcode 0xd9 decoder: dispatches on the ModR/M byte.  Register form
 * covers FLD/FXCH/FNOP/FSTP ST(i) plus the 0xe0..0xff table above; memory
 * form covers the m32r loads/stores and the FPU environment/control-word ops.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) as low 11 bits: modrm + low 3 bits of 0xd9. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8259
8260
/** Opcode 0xda 11/0. FCMOVB: copy ST(i) to ST(0) if CF=1 (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8287
8288
/** Opcode 0xda 11/1. FCMOVE: copy ST(i) to ST(0) if ZF=1 (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8315
8316
/** Opcode 0xda 11/2. FCMOVBE: copy ST(i) to ST(0) if CF=1 or ZF=1 (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8343
8344
/** Opcode 0xda 11/3. FCMOVU: copy ST(i) to ST(0) if PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8371
8372
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-double-pop forms): the assembly
 * worker only produces an FSW value; no FPU register is written.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operands are ST(0) and ST(1); both popped regardless of compare result. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8404
8405
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare ST(0) with ST(1), pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8412
8413
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches a 32-bit signed integer operand from memory, applies the assembly
 * worker to (ST(0), m32i) and writes the result back to ST(0).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8450
8451
/** Opcode 0xda !11/0. FIADD: st0 = st0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8458
8459
/** Opcode 0xda !11/1. FIMUL: st0 = st0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8466
8467
/** Opcode 0xda !11/2. FICOM: compare st0 with m32i, set C0/C2/C3 in FSW; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant also records DS/FPUDP alongside the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8500
8501
/** Opcode 0xda !11/3. FICOMP: like FICOM m32i, but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same assembly worker as FICOM; only the pop behaviour differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8534
8535
/** Opcode 0xda !11/4. FISUB: st0 = st0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8542
8543
/** Opcode 0xda !11/5. FISUBR: st0 = m32i - st0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8550
8551
/** Opcode 0xda !11/6. FIDIV: st0 = st0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8558
8559
/** Opcode 0xda !11/7. FIDIVR: st0 = m32i / st0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8566
8567
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder.  Register form: FCMOVB/E/BE/U and FUCOMPP
 * (only at bRm == 0xe9); memory form: the m32i integer arithmetic/compare ops.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modrm + low 3 bits of 0xda. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8609
8610
/** Opcode 0xdb !11/0. FILD: push m32i converted to 80-bit real onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7), the register that becomes the new top, to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8642
8643
/** Opcode 0xdb !11/1. FISTTP: store st0 as m32i using truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for write before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8678
8679
/** Opcode 0xdb !11/2. FIST: store st0 as m32i (rounded per FCW); no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8714
8715
/** Opcode 0xdb !11/3. FISTP: store st0 as m32i (rounded per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Same worker as FIST; only the pop behaviour differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8750
8751
/** Opcode 0xdb !11/5. FLD m80r: push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7), the register that becomes the new top, to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8783
8784
/** Opcode 0xdb !11/7. FSTP m80r: store st0 to memory as 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store needs the _EX mapping variant with an explicit alignment mask. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the real-indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8819
8820
/** Opcode 0xdb 11/0. FCMOVNB: copy ST(i) to ST(0) if CF=0 (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8847
8848
/** Opcode 0xdb 11/1. FCMOVNE: copy ST(i) to ST(0) if ZF=0 (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8875
8876
/** Opcode 0xdb 11/2. FCMOVNBE: copy ST(i) to ST(0) if CF=0 and ZF=0 (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8903
8904
/** Opcode 0xdb 11/3. FCMOVNU: copy ST(i) to ST(0) if PF=0 (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8931
8932
/** Opcode 0xdb 0xe0. FNENI: 8087 interrupt-enable; treated as a no-op here
 *  (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8944
8945
/** Opcode 0xdb 0xe1. FNDISI: 8087 interrupt-disable; treated as a no-op here
 *  (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8957
8958
/** Opcode 0xdb 0xe2. FNCLEX: clear FPU exception flags without checking
 *  for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8973
8974
/** Opcode 0xdb 0xe3. FNINIT: reinitialize the FPU without checking for
 *  pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8982
8983
/** Opcode 0xdb 0xe4. FNSETPM: 80287 set-protected-mode; treated as a no-op here
 *  (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8995
8996
/** Opcode 0xdb 0xe5. FRSTPM: 80287XL reset-protected-mode; raises \#UD here
 *  since newer CPUs do not implement it (no-op variant kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
9012
9013
/** Opcode 0xdb 11/5. FUCOMI: unordered compare st0 with st(i), set EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
9020
9021
/** Opcode 0xdb 11/6. FCOMI: ordered compare st0 with st(i), set EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
9028
9029
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder.  Register form: FCMOVNB/NE/NBE/NU, the
 * administrative 0xe0..0xe7 group (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM)
 * and FUCOMI/FCOMI; memory form: m32i loads/stores plus m80r load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modrm + low 3 bits of 0xdb. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The /4 group dispatches on the full ModR/M byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9081
9082
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Used by the 0xdc register-form arithmetic ops (FADD/FMUL/FSUB(R)/FDIV(R)
 * ST(i),ST(0)); the destination register index comes from the ModR/M r/m field.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST(0); result written back to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9115
9116
/** Opcode 0xdc 11/0. FADD: st(i) = st(i) + st0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9123
9124
/** Opcode 0xdc 11/1. FMUL: st(i) = st(i) * st0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9131
9132
/** Opcode 0xdc 11/4. FSUBR: st(i) = st0 - st(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9139
9140
/** Opcode 0xdc 11/5. FSUB: st(i) = st(i) - st0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9147
9148
/** Opcode 0xdc 11/6. FDIVR: st(i) = st0 / st(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9155
9156
/** Opcode 0xdc 11/7. FDIV: st(i) = st(i) / st0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9163
9164
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the m64r operand, applies the assembly worker to (ST(0), m64r)
 * and writes the result back to ST(0).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9200
9201
9202/** Opcode 0xdc !11/0. FADD ST0,m64real. */
9203FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
9204{
9205 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
9206 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
9207}
9208
9209
9210/** Opcode 0xdc !11/1. FMUL ST0,m64real. */
9211FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
9212{
9213 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
9214 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
9215}
9216
9217
9218/** Opcode 0xdc !11/2. FCOM ST0,m64real - compare only, updates FSW, no result stored. */
9219FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
9220{
9221 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
9222
9223 IEM_MC_BEGIN(3, 3);
9224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9225 IEM_MC_LOCAL(uint16_t, u16Fsw);
9226 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
9227 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9228 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9229 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
9230
9231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9233
9234 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9235 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9236 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9237
9238 IEM_MC_PREPARE_FPU_USAGE();
/* Only the FSW is updated on success; UINT8_MAX below means no stack register is stored to on underflow. */
9239 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9240 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
9241 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9242 IEM_MC_ELSE()
9243 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9244 IEM_MC_ENDIF();
9245 IEM_MC_ADVANCE_RIP();
9246
9247 IEM_MC_END();
9248 return VINF_SUCCESS;
9249}
9250
9251
9252/** Opcode 0xdc !11/3. FCOMP ST0,m64real - same as FCOM m64r but pops ST0 afterwards. */
9253FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
9254{
9255 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
9256
9257 IEM_MC_BEGIN(3, 3);
9258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9259 IEM_MC_LOCAL(uint16_t, u16Fsw);
9260 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
9261 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9262 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
9264
9265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9267
9268 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9269 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9270 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9271
9272 IEM_MC_PREPARE_FPU_USAGE();
/* Same comparison worker as FCOM; the _THEN_POP variants pop the stack in both the normal and underflow paths. */
9273 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9274 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
9275 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9276 IEM_MC_ELSE()
9277 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9278 IEM_MC_ENDIF();
9279 IEM_MC_ADVANCE_RIP();
9280
9281 IEM_MC_END();
9282 return VINF_SUCCESS;
9283}
9284
9285
9286/** Opcode 0xdc !11/4. FSUB ST0,m64real. */
9287FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
9288{
9289 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
9290 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
9291}
9292
9293
9294/** Opcode 0xdc !11/5. FSUBR ST0,m64real. */
9295FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
9296{
9297 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
9298 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
9299}
9300
9301
9302/** Opcode 0xdc !11/6. FDIV ST0,m64real. */
9303FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
9304{
9305 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
9306 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
9307}
9308
9309
9310/** Opcode 0xdc !11/7. FDIVR ST0,m64real. */
9311FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
9312{
9313 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
9314 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
9315}
9316
9317
9318/**
9319 * @opcode 0xdc
9320 *
9321 * Escape opcode 0xdc dispatcher: register forms (mod=11) operate on
9322 * ST(i),ST0; memory forms operate on ST0 and a 64-bit real operand.
9323 * The dispatch is keyed on the /reg field of the ModR/M byte.
9324 */
9325FNIEMOP_DEF(iemOp_EscF4)
9326{
9327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
/* Record the FPU opcode (FOP) - low 3 bits of the escape byte plus the ModR/M byte. */
9328 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9329 if (IEM_IS_MODRM_REG_MODE(bRm))
9330 {
9331 switch (IEM_GET_MODRM_REG_8(bRm))
9332 {
9333 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9334 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9335 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9336 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9337 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9338 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9339 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9340 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9342 }
9343 }
9344 else
9345 {
9346 switch (IEM_GET_MODRM_REG_8(bRm))
9347 {
9348 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9349 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9350 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9351 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9352 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9353 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9354 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9355 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9357 }
9358 }
9359}
9356
9357
9358/** Opcode 0xdd !11/0. FLD m64real - push a 64-bit real onto the FPU stack.
9359 * @sa iemOp_fld_m32r */
9360FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
9361{
9362 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
9363
9364 IEM_MC_BEGIN(2, 3);
9365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9366 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9367 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
9368 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9369 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
9370
9371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9373 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9374 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9375
9376 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9377 IEM_MC_PREPARE_FPU_USAGE();
/* A push requires register 7 (the new top after decrement) to be empty, else it's a stack overflow. */
9378 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9379 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val)
9380 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9381 IEM_MC_ELSE()
9382 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9383 IEM_MC_ENDIF();
9384 IEM_MC_ADVANCE_RIP();
9385
9386 IEM_MC_END();
9387 return VINF_SUCCESS;
9388}
9389
9390
9391/** Opcode 0xdd !11/1. FISTTP m64int - store ST0 as 64-bit int with truncation, then pop.
9392 * (Header previously said !11/0; the EscF5 dispatch routes /1 here.) */
9393FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
9394{
9395 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
9396 IEM_MC_BEGIN(3, 2);
9397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9398 IEM_MC_LOCAL(uint16_t, u16Fsw);
9399 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9400 IEM_MC_ARG(int64_t *, pi64Dst, 1);
9401 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9402
9403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9405 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9406 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9407
/* Map the destination for writing; the commit depends on the FSW (exception state) after the store helper. */
9408 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9409 IEM_MC_PREPARE_FPU_USAGE();
9410 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9411 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
9412 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9413 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9414 IEM_MC_ELSE()
/* ST0 empty: if invalid-op exceptions are masked (FCW.IM), write the integer indefinite value. */
9415 IEM_MC_IF_FCW_IM()
9416 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
9417 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
9418 IEM_MC_ENDIF();
9419 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9420 IEM_MC_ENDIF();
9421 IEM_MC_ADVANCE_RIP();
9422
9423 IEM_MC_END();
9424 return VINF_SUCCESS;
9425}
9425
9426
9427/** Opcode 0xdd !11/2. FST m64real - store ST0 to memory as 64-bit real, no pop.
9428 * (Header previously said !11/0; the EscF5 dispatch routes /2 here.) */
9429FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
9430{
9431 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
9432 IEM_MC_BEGIN(3, 2);
9433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9434 IEM_MC_LOCAL(uint16_t, u16Fsw);
9435 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9436 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9437 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9438
9439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9441 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9442 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9443
9444 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9445 IEM_MC_PREPARE_FPU_USAGE();
9446 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9447 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9448 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9449 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9450 IEM_MC_ELSE()
/* ST0 empty: with FCW.IM set, store the negative QNaN (real indefinite) instead. */
9451 IEM_MC_IF_FCW_IM()
9452 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9453 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9454 IEM_MC_ENDIF();
9455 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9456 IEM_MC_ENDIF();
9457 IEM_MC_ADVANCE_RIP();
9458
9459 IEM_MC_END();
9460 return VINF_SUCCESS;
9461}
9461
9462
9463
9464
9465/** Opcode 0xdd !11/3. FSTP m64real - same as FST m64r but pops ST0 afterwards.
9466 * (Header previously said !11/0; the EscF5 dispatch routes /3 here.) */
9467FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
9468{
9469 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
9470 IEM_MC_BEGIN(3, 2);
9471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9472 IEM_MC_LOCAL(uint16_t, u16Fsw);
9473 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9474 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9475 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9476
9477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9479 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9480 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9481
9482 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9483 IEM_MC_PREPARE_FPU_USAGE();
9484 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9485 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9486 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9487 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9488 IEM_MC_ELSE()
/* ST0 empty: with FCW.IM set, store the negative QNaN (real indefinite); pop happens in either case. */
9489 IEM_MC_IF_FCW_IM()
9490 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9491 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9492 IEM_MC_ENDIF();
9493 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9494 IEM_MC_ENDIF();
9495 IEM_MC_ADVANCE_RIP();
9496
9497 IEM_MC_END();
9498 return VINF_SUCCESS;
9499}
9499
9500
9501/** Opcode 0xdd !11/4. FRSTOR - restore the whole FPU state from memory; deferred to a C implementation.
9502 * (Header previously said !11/0; the EscF5 dispatch routes /4 here.) */
9503FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
9504{
9505 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
9506 IEM_MC_BEGIN(3, 0);
9507 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9508 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9509 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9512 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9513 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9514 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9515 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9516 IEM_MC_END();
9517 return VINF_SUCCESS;
9518}
9518
9519
9520/** Opcode 0xdd !11/6. FNSAVE - save the whole FPU state to memory; deferred to a C implementation.
9521 * (Header previously said !11/0; the EscF5 dispatch routes /6 here.) */
9522FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
9523{
9524 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
9525 IEM_MC_BEGIN(3, 0);
9526 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9527 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9528 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
9529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9532 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
9533 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9534 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
9535 IEM_MC_END();
9536 return VINF_SUCCESS;
9537
9538}
9538
9539/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory.
9540 * (Header previously said !11/0; the EscF5 dispatch routes /7 here.) */
9541FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
9542{
9543 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
9544
9545 IEM_MC_BEGIN(0, 2);
9546 IEM_MC_LOCAL(uint16_t, u16Tmp);
9547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9548
9549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9551 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9552
/* Read-only access is enough - the instruction only reads FSW, it changes nothing in the FPU state. */
9553 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9554 IEM_MC_FETCH_FSW(u16Tmp);
9555 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
9556 IEM_MC_ADVANCE_RIP();
9557
9558/** @todo Debug / drop a hint to the verifier that things may differ
9559 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
9560 * NT4SP1. (X86_FSW_PE) */
9561 IEM_MC_END();
9562 return VINF_SUCCESS;
9563}
9563
9564
9565/** Opcode 0xdd 11/0. FFREE ST(i) - tag the register as empty without changing the stack top. */
9566FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
9567{
9568 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
9569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9570 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
9571 unmodified. */
9572
9573 IEM_MC_BEGIN(0, 0);
9574
9575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9576 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9577
9578 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9579 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
9580 IEM_MC_UPDATE_FPU_OPCODE_IP();
9581
9582 IEM_MC_ADVANCE_RIP();
9583 IEM_MC_END();
9584 return VINF_SUCCESS;
9585}
9586
9587
9588/** Opcode 0xdd 11/2. FST ST(i) - copy ST0 into ST(i).
9589 * (Header previously said 11/1; the EscF5 dispatch routes /2 here.) */
9590FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
9591{
9592 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
9593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9594
9595 IEM_MC_BEGIN(0, 2);
9596 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9597 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9598 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9599 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9600
9601 IEM_MC_PREPARE_FPU_USAGE();
/* Wrap the ST0 value into an FPU result (FSW bits zero) and store it into the destination register. */
9602 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9603 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9604 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9605 IEM_MC_ELSE()
9606 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9607 IEM_MC_ENDIF();
9608
9609 IEM_MC_ADVANCE_RIP();
9610 IEM_MC_END();
9611 return VINF_SUCCESS;
9612}
9612
9613
9614/** Opcode 0xdd 11/4. FUCOM ST0,ST(i) - unordered compare, updates FSW only.
9615 * (Header previously said 11/3; the EscF5 dispatch routes /4 here.) */
9616FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
9617{
9618 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
9619 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
9620}
9621
9622
9623/** Opcode 0xdd 11/5. FUCOMP ST0,ST(i) - as FUCOM but pops ST0 afterwards.
9624 * (Header previously said 11/4; the EscF5 dispatch routes /5 here.) */
9625FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
9626{
9627 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
9628 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
9629}
9628
9629
9630/**
9631 * @opcode 0xdd
9632 *
9633 * Escape opcode 0xdd dispatcher: register forms cover FFREE/FXCH/FST/FSTP/
9634 * FUCOM/FUCOMP; memory forms cover 64-bit real loads/stores, FISTTP,
9635 * FRSTOR, FNSAVE and FNSTSW m16.
9636 */
9637FNIEMOP_DEF(iemOp_EscF5)
9638{
9639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9640 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9641 if (IEM_IS_MODRM_REG_MODE(bRm))
9642 {
9643 switch (IEM_GET_MODRM_REG_8(bRm))
9644 {
9645 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9646 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9647 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9648 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9649 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9650 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9651 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9652 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9654 }
9655 }
9656 else
9657 {
9658 switch (IEM_GET_MODRM_REG_8(bRm))
9659 {
9660 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9661 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9662 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9663 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9664 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9665 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9666 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9667 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9669 }
9670 }
9671}
9668
9669
9670/** Opcode 0xde 11/0. FADDP ST(i),ST0 - add, store in ST(i), pop ST0. */
9671FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
9672{
9673 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
9674 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
9675}
9676
9677
9678/** Opcode 0xde 11/1. FMULP ST(i),ST0 - multiply, store in ST(i), pop ST0.
9679 * (Header previously said 11/0; the EscF6 dispatch routes /1 here.) */
9680FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
9681{
9682 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
9683 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
9684}
9685
9686
9687/** Opcode 0xde 0xd9. FCOMPP - compare ST0 with ST1 and pop both. */
9688FNIEMOP_DEF(iemOp_fcompp)
9689{
9690 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
9691 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
9692}
9693
9694
9695/** Opcode 0xde 11/4. FSUBRP ST(i),ST0 - reverse subtract, store in ST(i), pop ST0. */
9696FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
9697{
9698 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
9699 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
9700}
9701
9702
9703/** Opcode 0xde 11/5. FSUBP ST(i),ST0 - subtract, store in ST(i), pop ST0. */
9704FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
9705{
9706 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
9707 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
9708}
9709
9710
9711/** Opcode 0xde 11/6. FDIVRP ST(i),ST0 - reverse divide, store in ST(i), pop ST0. */
9712FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
9713{
9714 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
9715 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
9716}
9717
9718
9719/** Opcode 0xde 11/7. FDIVP ST(i),ST0 - divide, store in ST(i), pop ST0. */
9720FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
9721{
9722 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
9723 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
9724}
9724
9725
9726/**
9727 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9728 * the result in ST0.
9729 *
9730 * @param bRm Mod R/M byte.
9731 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9732 */
9733FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9734{
9735 IEM_MC_BEGIN(3, 3);
9736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9737 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9738 IEM_MC_LOCAL(int16_t, i16Val2);
9739 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9740 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9741 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9742
9743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9745
9746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9748 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9749
9750 IEM_MC_PREPARE_FPU_USAGE();
/* Result goes back to ST0 (register 0); empty ST0 is a stack underflow. */
9751 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9752 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9753 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9754 IEM_MC_ELSE()
9755 IEM_MC_FPU_STACK_UNDERFLOW(0);
9756 IEM_MC_ENDIF();
9757 IEM_MC_ADVANCE_RIP();
9758
9759 IEM_MC_END();
9760 return VINF_SUCCESS;
9761}
9762
9763
9764/** Opcode 0xde !11/0. FIADD ST0,m16int. */
9765FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
9766{
9767 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
9768 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
9769}
9770
9771
9772/** Opcode 0xde !11/1. FIMUL ST0,m16int. */
9773FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
9774{
9775 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
9776 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
9777}
9778
9779
9780/** Opcode 0xde !11/2. FICOM ST0,m16int - integer compare, updates FSW only. */
9781FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
9782{
9783 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
9784
9785 IEM_MC_BEGIN(3, 3);
9786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9787 IEM_MC_LOCAL(uint16_t, u16Fsw);
9788 IEM_MC_LOCAL(int16_t, i16Val2);
9789 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9790 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9791 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9792
9793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9795
9796 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9797 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9798 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9799
9800 IEM_MC_PREPARE_FPU_USAGE();
9801 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9802 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9803 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9804 IEM_MC_ELSE()
9805 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9806 IEM_MC_ENDIF();
9807 IEM_MC_ADVANCE_RIP();
9808
9809 IEM_MC_END();
9810 return VINF_SUCCESS;
9811}
9812
9813
9814/** Opcode 0xde !11/3. FICOMP ST0,m16int - as FICOM m16i but pops ST0 afterwards. */
9815FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
9816{
9817 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
9818
9819 IEM_MC_BEGIN(3, 3);
9820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9821 IEM_MC_LOCAL(uint16_t, u16Fsw);
9822 IEM_MC_LOCAL(int16_t, i16Val2);
9823 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9824 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9825 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9826
9827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9829
9830 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9831 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9832 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9833
9834 IEM_MC_PREPARE_FPU_USAGE();
/* Same compare worker as FICOM; the _THEN_POP variants pop in both the normal and underflow paths. */
9835 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9836 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9837 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9838 IEM_MC_ELSE()
9839 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9840 IEM_MC_ENDIF();
9841 IEM_MC_ADVANCE_RIP();
9842
9843 IEM_MC_END();
9844 return VINF_SUCCESS;
9845}
9846
9847
9848/** Opcode 0xde !11/4. FISUB ST0,m16int. */
9849FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
9850{
9851 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
9852 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
9853}
9854
9855
9856/** Opcode 0xde !11/5. FISUBR ST0,m16int. */
9857FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
9858{
9859 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
9860 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
9861}
9862
9863
9864/** Opcode 0xde !11/6. FIDIV ST0,m16int. */
9865FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
9866{
9867 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
9868 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
9869}
9870
9871
9872/** Opcode 0xde !11/7. FIDIVR ST0,m16int. */
9873FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
9874{
9875 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
9876 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
9877}
9878
9879
9880/**
9881 * @opcode 0xde
9882 *
9883 * Escape opcode 0xde dispatcher: register forms are the pop-after
9884 * arithmetic instructions (FADDP ... FDIVP) plus FCOMPP at 0xd9;
9885 * memory forms take a 16-bit integer operand.
9886 */
9887FNIEMOP_DEF(iemOp_EscF6)
9888{
9889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9890 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9891 if (IEM_IS_MODRM_REG_MODE(bRm))
9892 {
9893 switch (IEM_GET_MODRM_REG_8(bRm))
9894 {
9895 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9896 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9897 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9898 case 3: if (bRm == 0xd9)
9899 return FNIEMOP_CALL(iemOp_fcompp);
9900 return IEMOP_RAISE_INVALID_OPCODE();
9901 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9902 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9903 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9904 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9906 }
9907 }
9908 else
9909 {
9910 switch (IEM_GET_MODRM_REG_8(bRm))
9911 {
9912 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9913 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9914 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9915 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9916 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9917 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9918 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9919 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9921 }
9922 }
9923}
9920
9921
9922/** Opcode 0xdf 11/0.
9923 * Undocumented instruction, assumed to work like ffree + fincstp. */
9924FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
9925{
9926 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
9927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9928
9929 IEM_MC_BEGIN(0, 0);
9930
9931 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9932 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9933
/* Tag ST(i) empty, then increment the stack top - the "pop" that distinguishes FFREEP from FFREE. */
9934 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9935 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
9936 IEM_MC_FPU_STACK_INC_TOP();
9937 IEM_MC_UPDATE_FPU_OPCODE_IP();
9938
9939 IEM_MC_ADVANCE_RIP();
9940 IEM_MC_END();
9941 return VINF_SUCCESS;
9942}
9943
9944
9945/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX. */
9946FNIEMOP_DEF(iemOp_fnstsw_ax)
9947{
9948 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
9949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9950
9951 IEM_MC_BEGIN(0, 1);
9952 IEM_MC_LOCAL(uint16_t, u16Tmp);
9953 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9954 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9955 IEM_MC_FETCH_FSW(u16Tmp);
9956 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
9957 IEM_MC_ADVANCE_RIP();
9958 IEM_MC_END();
9959 return VINF_SUCCESS;
9960}
9961
9962
9963/** Opcode 0xdf 11/5. FUCOMIP ST0,ST(i) - compare into EFLAGS and pop. */
9964FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
9965{
9966 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
/* NOTE(review): this passes the same iemAImpl_fcomi_r80_by_r80 worker as FCOMIP below.
   Architecturally FUCOMIP differs from FCOMIP only in #IA behaviour on QNaN operands -
   confirm whether sharing the worker is intentional or a dedicated fucomi worker exists. */
9967 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9968}
9969
9970
9971/** Opcode 0xdf 11/6. FCOMIP ST0,ST(i) - compare into EFLAGS and pop. */
9972FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
9973{
9974 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
9975 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9976}
9977
9978
9979/** Opcode 0xdf !11/0. FILD m16int - convert a 16-bit integer and push it onto the FPU stack. */
9980FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
9981{
9982 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
9983
9984 IEM_MC_BEGIN(2, 3);
9985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9986 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9987 IEM_MC_LOCAL(int16_t, i16Val);
9988 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9989 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
9990
9991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993
9994 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9995 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9996 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9997
9998 IEM_MC_PREPARE_FPU_USAGE();
/* Register 7 (new top after push) must be empty or we signal a stack push overflow. */
9999 IEM_MC_IF_FPUREG_IS_EMPTY(7)
10000 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
10001 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10002 IEM_MC_ELSE()
10003 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10004 IEM_MC_ENDIF();
10005 IEM_MC_ADVANCE_RIP();
10006
10007 IEM_MC_END();
10008 return VINF_SUCCESS;
10009}
10010
10011
10012/** Opcode 0xdf !11/1. FISTTP m16int - store ST0 as 16-bit int with truncation, then pop. */
10013FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
10014{
10015 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
10016 IEM_MC_BEGIN(3, 2);
10017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10018 IEM_MC_LOCAL(uint16_t, u16Fsw);
10019 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10020 IEM_MC_ARG(int16_t *, pi16Dst, 1);
10021 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10022
10023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10025 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10026 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10027
10028 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10029 IEM_MC_PREPARE_FPU_USAGE();
10030 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10031 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
10032 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
10033 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10034 IEM_MC_ELSE()
/* ST0 empty: if FCW.IM is set, write the 16-bit integer indefinite value before flagging underflow. */
10035 IEM_MC_IF_FCW_IM()
10036 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
10037 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
10038 IEM_MC_ENDIF();
10039 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10040 IEM_MC_ENDIF();
10041 IEM_MC_ADVANCE_RIP();
10042
10043 IEM_MC_END();
10044 return VINF_SUCCESS;
10045}
10046
10047
10048/** Opcode 0xdf !11/2. FIST m16int - store ST0 as 16-bit int (rounded per FCW.RC), no pop. */
10049FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
10050{
10051 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
10052 IEM_MC_BEGIN(3, 2);
10053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10054 IEM_MC_LOCAL(uint16_t, u16Fsw);
10055 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10056 IEM_MC_ARG(int16_t *, pi16Dst, 1);
10057 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10058
10059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10063
10064 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10065 IEM_MC_PREPARE_FPU_USAGE();
10066 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10067 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
10068 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
10069 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10070 IEM_MC_ELSE()
10071 IEM_MC_IF_FCW_IM()
10072 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
10073 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
10074 IEM_MC_ENDIF();
10075 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10076 IEM_MC_ENDIF();
10077 IEM_MC_ADVANCE_RIP();
10078
10079 IEM_MC_END();
10080 return VINF_SUCCESS;
10081}
10082
10083
10084/** Opcode 0xdf !11/3. FISTP m16int - as FIST m16i but pops ST0 afterwards. */
10085FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
10086{
10087 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
10088 IEM_MC_BEGIN(3, 2);
10089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10090 IEM_MC_LOCAL(uint16_t, u16Fsw);
10091 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10092 IEM_MC_ARG(int16_t *, pi16Dst, 1);
10093 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10094
10095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10097 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10098 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10099
10100 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10101 IEM_MC_PREPARE_FPU_USAGE();
10102 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10103 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
10104 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
10105 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10106 IEM_MC_ELSE()
10107 IEM_MC_IF_FCW_IM()
10108 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
10109 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
10110 IEM_MC_ENDIF();
10111 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10112 IEM_MC_ENDIF();
10113 IEM_MC_ADVANCE_RIP();
10114
10115 IEM_MC_END();
10116 return VINF_SUCCESS;
10117}
10118
10119
/**
 * Opcode 0xdf !11/4.
 *
 * FBLD m80d: load an 80-bit packed BCD value from memory, convert it to
 * extended real and push it onto the FPU register stack.  Pushing requires
 * ST(7) to be free; otherwise a stack (push) overflow is signalled.
 */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the packed BCD source before touching the FPU state. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10151
10152
/**
 * Opcode 0xdf !11/5.
 *
 * FILD m64i: load a signed 64-bit integer from memory, convert it to
 * extended real and push it onto the FPU register stack.  Pushing requires
 * ST(7) to be free; otherwise a stack (push) overflow is signalled.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer source before touching the FPU state. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10184
10185
/**
 * Opcode 0xdf !11/6.
 *
 * FBSTP m80d: store ST(0) to memory as an 80-bit packed BCD value, then pop
 * the FPU register stack.  On stack underflow with IM masked, the BCD
 * indefinite value is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination before preparing FPU usage.  The explicit
       map variant is used here to pass the alignment requirement. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the BCD indefinite only if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10220
10221
/**
 * Opcode 0xdf !11/7.
 *
 * FISTP m64i: store ST(0) to memory as a signed 64-bit integer, then pop the
 * FPU register stack.  On stack underflow with IM masked, the 64-bit integer
 * indefinite value (INT64_MIN) is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before preparing FPU usage or reading ST(0). */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the integer indefinite only if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10256
10257
10258/**
10259 * @opcode 0xdf
10260 */
10261FNIEMOP_DEF(iemOp_EscF7)
10262{
10263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10264 if (IEM_IS_MODRM_REG_MODE(bRm))
10265 {
10266 switch (IEM_GET_MODRM_REG_8(bRm))
10267 {
10268 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10269 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10270 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10271 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10272 case 4: if (bRm == 0xe0)
10273 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10274 return IEMOP_RAISE_INVALID_OPCODE();
10275 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10276 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10277 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10279 }
10280 }
10281 else
10282 {
10283 switch (IEM_GET_MODRM_REG_8(bRm))
10284 {
10285 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10286 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10287 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10288 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10289 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10290 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10291 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10292 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10294 }
10295 }
10296}
10297
10298
/**
 * @opcode 0xe0
 *
 * LOOPNE Jb: decrement CX/ECX/RCX (selected by effective address size) and
 * take the short relative branch while the counter is non-zero AND ZF is
 * clear.  EFLAGS are not modified by the decrement.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter width follows the effective address size, not operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10347
10348
/**
 * @opcode 0xe1
 *
 * LOOPE Jb: decrement CX/ECX/RCX (selected by effective address size) and
 * take the short relative branch while the counter is non-zero AND ZF is
 * set.  EFLAGS are not modified by the decrement.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter width follows the effective address size, not operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10397
10398
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement CX/ECX/RCX (selected by effective address size) and
 * take the short relative branch while the counter is non-zero.  EFLAGS are
 * not modified.  Includes a logging-only shortcut for tight 'loop $-2'
 * busy-wait loops.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /** NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Detect 'loop $-2' (branch back to its own start) and simply clear the
       counter in one go instead of iterating - only when verbose logging is on. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* The counter width follows the effective address size, not operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10485
10486
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative branch when the counter
 * register (CX/ECX/RCX, selected by effective address size) is zero.  Note
 * that the IF conditions below test for NON-zero, so the branch is in the
 * ELSE arm.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10532
10533
/** Opcode 0xe4 - IN AL,Ib: read one byte from the immediate port into AL;
 * deferred to the C implementation (iemCImpl_in). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10542
10543
/** Opcode 0xe5 - IN eAX,Ib: read a word or dword (per effective operand
 * size) from the immediate port into AX/EAX; deferred to iemCImpl_in. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10552
10553
/** Opcode 0xe6 - OUT Ib,AL: write AL to the immediate port; deferred to the
 * C implementation (iemCImpl_out). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10562
10563
/** Opcode 0xe7 - OUT Ib,eAX: write AX/EAX (per effective operand size) to
 * the immediate port; deferred to iemCImpl_out. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10572
10573
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The displacement is 16 or 32 bits by
 * operand size; in 64-bit mode a 32-bit displacement is fetched and
 * sign-extended to 64 bits.  Deferred to the per-width C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10604
10605
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  The displacement is 16 bits for 16-bit
 * operand size; 32-bit and 64-bit modes share the 32-bit displacement path
 * (sign-extended by IEM_MC_REL_JMP_S32 as needed).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* 64-bit mode also uses the imm32 encoding. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10637
10638
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an immediate selector:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); deferred to iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero-extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10657
10658
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with a signed 8-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10674
10675
/** Opcode 0xec - IN AL,DX: read one byte from the port in DX into AL;
 * deferred to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10683
10684
/** Opcode 0xed - IN eAX,DX: read a word or dword (per effective operand
 * size) from the port in DX into AX/EAX; deferred to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10692
10693
/** Opcode 0xee - OUT DX,AL: write AL to the port in DX; deferred to
 * iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10701
10702
/** Opcode 0xef - OUT DX,eAX: write AX/EAX (per effective operand size) to
 * the port in DX; deferred to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10710
10711
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless lock-disregarding mode is
 * enabled) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    /* fDisregardLock lets callers run lock-prefixed code as if unprefixed. */
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10724
10725
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the common software-interrupt C
 * implementation (IEMINT_INT1 distinguishes it from INT n/INT3/INTO).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
10739
10740
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (clearing any earlier REPZ),
 * selects prefix-table index 3 and continues decoding with the next byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10758
10759
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records IEM_OP_PRF_REPZ (clearing any earlier REPNZ),
 * selects prefix-table index 2 and continues decoding with the next byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10777
10778
/**
 * @opcode 0xf4
 *
 * HLT: halt the processor; deferred to the C implementation (iemCImpl_hlt).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10788
10789
/**
 * @opcode 0xf5
 *
 * CMC: complement (flip) the carry flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10803
10804
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the ModR/M mode: register operands are modified in place via
 * a register reference; memory operands are mapped read-write and use the
 * locked variant of the implementation when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers
 *                  per operand size).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix was decoded. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10848
10849
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the common register worker
 * (iemOpCommonUnaryGReg); memory operands are handled here per effective
 * operand size, using the locked worker when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers
 *                  per operand size).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10928
10929
/** Opcode 0xf6 /0 - TEST Eb,Ib: AND the 8-bit operand with the immediate,
 * updating EFLAGS only (the destination is never written, hence the
 * read-only memory mapping below). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: the immediate follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10976
10977
/** Opcode 0xf7 /0 - TEST Ev,Iv: AND the 16/32/64-bit operand with the
 * immediate (imm32 sign-extended in 64-bit mode), updating EFLAGS only.
 * The destination is never written, hence the read-only memory mapping. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a 16-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a 32-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a sign-extended 32-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11117
11118
/** Opcode 0xf6 /4, /5, /6 and /7 - common worker for 8-bit MUL/IMUL/DIV/IDIV.
 * The worker operates on AX (implicit destination) and an 8-bit source from
 * register or memory; a non-zero return code from the assembly helper raises
 * \#DE (divide error). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (e.g. division by zero / overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (e.g. division by zero / overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11170
11171
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for the word/dword/qword
 * MUL, IMUL, DIV and IDIV forms of group 3.
 *
 * The assembly workers operate on the implicit xAX/xDX register pair and
 * return zero on success; a non-zero return signals a divide error, which we
 * translate into a \#DE exception.  For the 32-bit register forms the high
 * halves of RAX/RDX are explicitly cleared on success, per AMD64 semantics.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pImpl   Table with the 16/32/64-bit assembly implementations.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        /* NOTE(review): also invoked inside each case below; the duplicate
           call is redundant but harmless. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc from the worker indicates a divide error (#DE). */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the upper halves (done here since
                       the worker only got 32-bit references). */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the upper halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11355
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte-sized Eb operand: dispatches on the ModR/M reg field to
 * TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6) or IDIV (/7).
 * /1 is undefined and raises \#UD (but see the todo about its real-CPU alias).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11394
11395
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword Ev operand: dispatches on the ModR/M reg
 * field to TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6) or
 * IDIV (/7).  /1 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11434
11435
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11449
11450
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11464
11465
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation since it
 * needs IOPL/CPL and VME/PVI checks.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11475
11476
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation since it
 * needs privilege checks and interrupt-shadow handling.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11483
11484
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11498
11499
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11513
11514
/**
 * @opcode 0xfe
 *
 * Group 4: INC Eb (/0) and DEC Eb (/1); all other reg-field values are
 * undefined and raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11534
11535
/**
 * Opcode 0xff /2 - CALL Ev, near indirect call through a register or memory
 * operand.  Defaults to 64-bit operand size in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11620
11621typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11622
11623FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11624{
11625 /* Registers? How?? */
11626 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
11627 { /* likely */ }
11628 else
11629 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11630
11631 /* Far pointer loaded from memory. */
11632 switch (pVCpu->iem.s.enmEffOpSize)
11633 {
11634 case IEMMODE_16BIT:
11635 IEM_MC_BEGIN(3, 1);
11636 IEM_MC_ARG(uint16_t, u16Sel, 0);
11637 IEM_MC_ARG(uint16_t, offSeg, 1);
11638 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11642 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11643 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11644 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11645 IEM_MC_END();
11646 return VINF_SUCCESS;
11647
11648 case IEMMODE_64BIT:
11649 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11650 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11651 * and call far qword [rsp] encodings. */
11652 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11653 {
11654 IEM_MC_BEGIN(3, 1);
11655 IEM_MC_ARG(uint16_t, u16Sel, 0);
11656 IEM_MC_ARG(uint64_t, offSeg, 1);
11657 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11661 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11662 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11663 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11664 IEM_MC_END();
11665 return VINF_SUCCESS;
11666 }
11667 /* AMD falls thru. */
11668 RT_FALL_THRU();
11669
11670 case IEMMODE_32BIT:
11671 IEM_MC_BEGIN(3, 1);
11672 IEM_MC_ARG(uint16_t, u16Sel, 0);
11673 IEM_MC_ARG(uint32_t, offSeg, 1);
11674 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11678 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11679 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11680 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11681 IEM_MC_END();
11682 return VINF_SUCCESS;
11683
11684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11685 }
11686}
11687
11688
/**
 * Opcode 0xff /3 - CALLF Ep, far indirect call through a far pointer in
 * memory; delegates to the common far Ep worker with iemCImpl_callf.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11698
11699
/**
 * Opcode 0xff /4 - JMP Ev, near indirect jump through a register or memory
 * operand.  Defaults to 64-bit operand size in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11784
11785
/**
 * Opcode 0xff /5 - JMPF Ep, far indirect jump through a far pointer in
 * memory; delegates to the common far Ep worker with iemCImpl_FarJmp.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11795
11796
/**
 * Opcode 0xff /6 - PUSH Ev.  Register forms are routed to the common
 * push-GReg worker; memory forms are handled inline.  Defaults to 64-bit
 * operand size in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11852
11853
/**
 * @opcode 0xff
 *
 * Group 5: INC (/0), DEC (/1), near CALL (/2), far CALL (/3), near JMP (/4),
 * far JMP (/5) and PUSH (/6).  /7 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit reg field is exhaustively covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11884
11885
11886
/**
 * The one-byte opcode decoder dispatch table, indexed by opcode byte
 * (0x00..0xff).  Forward declared (extern) near the top of the file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11954
11955
11956/** @} */
11957
Note: See TracBrowser for help on using the repository browser.

© 2023 Oracle
ContactPrivacy policyTerms of Use