- Timestamp: Jul 17, 2023 4:38:48 PM (15 months ago)
- Location: trunk/src/VBox/VMM
- Files: 4 edited
VMMAll/IEMAllAImpl.asm (modified) (4 diffs)
-
VMMAll/IEMAllAImplC.cpp (modified) (1 diff)
-
VMMAll/IEMAllInstructionsVexMap3.cpp.h (modified) (2 diffs)
-
include/IEMInternal.h (modified) (1 diff)
Legend: Unmodified / Added / Removed (diff line markers)
-
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r100602 r100607 5238 5238 ; 5239 5239 ; @param 1 The instruction name. 5240 ; @param 2 Whether the instruction has a 256-bit variant (1) or not (0). 5240 ; @param 2 Whether the instruction has a 128-bit variant (1) or not (0). 5241 ; @param 3 Whether the instruction has a 256-bit variant (1) or not (0). 5241 5242 ; 5242 5243 ; @param A0 Pointer to the destination media register size operand (output). … … 5245 5246 ; @param A3 The 8-bit immediate 5246 5247 ; 5247 %macro IEMIMPL_MEDIA_AVX_INSN_IMM8_6 2 5248 %macro IEMIMPL_MEDIA_AVX_INSN_IMM8_6 3 5249 %if %2 == 1 5248 5250 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u128, 16 5249 5251 PROLOGUE_4_ARGS … … 5276 5278 .immEnd: IEMCHECK_256_JUMP_ARRAY_SIZE (.immEnd - .imm0), 0x800 5277 5279 ENDPROC iemAImpl_ %+ %1 %+ _u128 5278 5279 %if %2 == 1 5280 %endif 5281 5282 %if %3 == 1 5280 5283 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u256, 16 5281 5284 PROLOGUE_4_ARGS … … 5311 5314 %endmacro 5312 5315 5313 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vblendps, 1 5314 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vblendpd, 1 5315 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpblendw, 1 5316 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpalignr, 1 5317 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpclmulqdq, 0 5316 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vblendps, 1, 1 5317 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vblendpd, 1, 1 5318 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpblendw, 1, 1 5319 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpalignr, 1, 1 5320 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vpclmulqdq, 1, 0 5321 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vperm2i128, 0, 1 5322 IEMIMPL_MEDIA_AVX_INSN_IMM8_6 vperm2f128, 0, 1 5318 5323 5319 5324 -
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r100602 r100607 18447 18447 18448 18448 /** 18449 * VPERM2I128 18450 */ 18451 IEM_DECL_IMPL_DEF(void, iemAImpl_vperm2i128_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bImm)) 18452 { 18453 if (bImm & RT_BIT(3)) 18454 { 18455 puDst->au64[0] = 0; 18456 puDst->au64[1] = 0; 18457 } 18458 else 18459 { 18460 switch (bImm & 0x3) 18461 { 18462 case 0: 18463 puDst->au64[0] = puSrc1->au64[0]; 18464 puDst->au64[1] = puSrc1->au64[1]; 18465 break; 18466 case 1: 18467 puDst->au64[0] = puSrc1->au64[2]; 18468 puDst->au64[1] = puSrc1->au64[3]; 18469 break; 18470 case 2: 18471 puDst->au64[0] = puSrc2->au64[0]; 18472 puDst->au64[1] = puSrc2->au64[1]; 18473 break; 18474 case 3: 18475 puDst->au64[0] = puSrc2->au64[2]; 18476 puDst->au64[1] = puSrc2->au64[3]; 18477 break; 18478 } 18479 } 18480 18481 if (bImm & RT_BIT(7)) 18482 { 18483 puDst->au64[2] = 0; 18484 puDst->au64[3] = 0; 18485 } 18486 else 18487 { 18488 switch ((bImm >> 4) & 0x3) 18489 { 18490 case 0: 18491 puDst->au64[2] = puSrc1->au64[0]; 18492 puDst->au64[3] = puSrc1->au64[1]; 18493 break; 18494 case 1: 18495 puDst->au64[2] = puSrc1->au64[2]; 18496 puDst->au64[3] = puSrc1->au64[3]; 18497 break; 18498 case 2: 18499 puDst->au64[2] = puSrc2->au64[0]; 18500 puDst->au64[3] = puSrc2->au64[1]; 18501 break; 18502 case 3: 18503 puDst->au64[2] = puSrc2->au64[2]; 18504 puDst->au64[3] = puSrc2->au64[3]; 18505 break; 18506 } 18507 } 18508 } 18509 18510 18511 /** 18512 * VPERM2F128 18513 */ 18514 IEM_DECL_IMPL_DEF(void, iemAImpl_vperm2f128_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bImm)) 18515 { 18516 iemAImpl_vperm2i128_u256_fallback(puDst, puSrc1, puSrc2, bImm); 18517 } 18518 18519 18520 /** 18449 18521 * DPPS 18450 18522 */ -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h
r100579 r100607 279 279 /** Opcode VEX.66.0F3A 0x05. */ 280 280 FNIEMOP_STUB(iemOp_vpermilpd_Vx_Wx_Ib); 281 282 281 283 /** Opcode VEX.66.0F3A 0x06 (vex only) */ 282 FNIEMOP_STUB(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib); 284 FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib) 285 { 286 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2F128, vperm2f128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */ 287 288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 289 if (IEM_IS_MODRM_REG_MODE(bRm)) 290 { 291 /* 292 * Register, register. 293 */ 294 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 295 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2); 296 IEM_MC_BEGIN(4, 3); 297 IEM_MC_LOCAL(RTUINT256U, uDst); 298 IEM_MC_LOCAL(RTUINT256U, uSrc1); 299 IEM_MC_LOCAL(RTUINT256U, uSrc2); 300 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 301 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 302 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 303 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3); 304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 305 IEM_MC_PREPARE_AVX_USAGE(); 306 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 307 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 308 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback), 309 puDst, puSrc1, puSrc2, bImmArg); 310 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 311 IEM_MC_ADVANCE_RIP_AND_FINISH(); 312 IEM_MC_END(); 313 } 314 else 315 { 316 /* 317 * Register, memory. 
318 */ 319 IEM_MC_BEGIN(4, 2); 320 IEM_MC_LOCAL(RTUINT256U, uDst); 321 IEM_MC_LOCAL(RTUINT256U, uSrc1); 322 IEM_MC_LOCAL(RTUINT256U, uSrc2); 323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 324 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 325 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 326 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 327 328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 329 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 330 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3); 331 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2); 332 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 333 IEM_MC_PREPARE_AVX_USAGE(); 334 335 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 336 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 337 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback), 338 puDst, puSrc1, puSrc2, bImmArg); 339 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 340 341 IEM_MC_ADVANCE_RIP_AND_FINISH(); 342 IEM_MC_END(); 343 } 344 } 345 346 283 347 /* Opcode VEX.66.0F3A 0x07 - invalid */ 284 348 /** Opcode VEX.66.0F3A 0x08. */ … … 574 638 575 639 /* Opcode VEX.66.0F3A 0x45 - invalid */ 640 641 576 642 /** Opcode VEX.66.0F3A 0x46 (vex only) */ 577 FNIEMOP_STUB(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib); 643 FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib) 644 { 645 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2I128, vperm2i128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */ 646 647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 648 if (IEM_IS_MODRM_REG_MODE(bRm)) 649 { 650 /* 651 * Register, register. 
652 */ 653 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 654 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2); 655 IEM_MC_BEGIN(4, 3); 656 IEM_MC_LOCAL(RTUINT256U, uDst); 657 IEM_MC_LOCAL(RTUINT256U, uSrc1); 658 IEM_MC_LOCAL(RTUINT256U, uSrc2); 659 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 660 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 661 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 662 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3); 663 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 664 IEM_MC_PREPARE_AVX_USAGE(); 665 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 666 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 667 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback), 668 puDst, puSrc1, puSrc2, bImmArg); 669 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 670 IEM_MC_ADVANCE_RIP_AND_FINISH(); 671 IEM_MC_END(); 672 } 673 else 674 { 675 /* 676 * Register, memory. 
677 */ 678 IEM_MC_BEGIN(4, 2); 679 IEM_MC_LOCAL(RTUINT256U, uDst); 680 IEM_MC_LOCAL(RTUINT256U, uSrc1); 681 IEM_MC_LOCAL(RTUINT256U, uSrc2); 682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 683 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 684 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 685 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 686 687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 688 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 689 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3); 690 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2); 691 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 692 IEM_MC_PREPARE_AVX_USAGE(); 693 694 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 695 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 696 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback), 697 puDst, puSrc1, puSrc2, bImmArg); 698 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 699 700 IEM_MC_ADVANCE_RIP_AND_FINISH(); 701 IEM_MC_END(); 702 } 703 } 704 705 578 706 /* Opcode VEX.66.0F3A 0x47 - invalid */ 579 707 /** Opcode VEX.66.0F3A 0x48 (AMD tables only). */ -
trunk/src/VBox/VMM/include/IEMInternal.h
r100602 r100607 2672 2672 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendps_u256, iemAImpl_vblendps_u256_fallback; 2673 2673 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendpd_u256, iemAImpl_vblendpd_u256_fallback; 2674 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback; 2675 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback; 2674 2676 2675 2677 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback;
Note: See TracChangeset for help on using the changeset viewer.
