Changeset 60676 in vbox
- Timestamp: Apr 24, 2016 11:04:57 AM
- Location: trunk/src/VBox/ValidationKit/bootsectors
- Files: 5 added, 26 edited, 1 copied
- bs3-cpu-basic-2-asm.asm (modified) (2 diffs)
- bs3-cpu-basic-2-template.c (modified) (20 diffs)
- bs3-cpu-basic-2-template.mac (modified) (2 diffs)
- bs3kit/Makefile.kmk (modified) (4 diffs)
- bs3kit/VBoxBs3ObjConverter.cpp (modified) (43 diffs)
- bs3kit/bs3-bootsector.asm (modified) (1 diff)
- bs3kit/bs3-c16-SwitchFromV86To16BitAndCallC.asm (added)
- bs3kit/bs3-cmn-MemAlloc.c (modified) (1 diff)
- bs3kit/bs3-cmn-MemCmp.asm (copied from trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-MemSet.asm) (2 diffs)
- bs3kit/bs3-cmn-MemGuardedTestPage.c (added)
- bs3kit/bs3-cmn-PagingProtect.c (modified) (11 diffs)
- bs3kit/bs3-cmn-RegCtxSetGrpSegFromCurPtr.c (added)
- bs3kit/bs3-cmn-RegCtxSetGrpSegFromFlat.c (added)
- bs3kit/bs3-cmn-RegCtxSetRipCsFromLnkPtr.c (added)
- bs3kit/bs3-cmn-SelRealModeDataToFlat.asm (modified) (1 diff)
- bs3kit/bs3-mode-EnteredMode.asm (modified) (1 diff)
- bs3kit/bs3-mode-TestDoModesHlp.asm (modified) (3 diffs)
- bs3kit/bs3-wc16-I8DQ.asm (modified) (3 diffs)
- bs3kit/bs3-wc16-I8DR.asm (modified) (2 diffs)
- bs3kit/bs3-wc16-I8RS.asm (modified) (1 diff)
- bs3kit/bs3-wc16-U8DQ.asm (modified) (3 diffs)
- bs3kit/bs3-wc16-U8DR.asm (modified) (3 diffs)
- bs3kit/bs3-wc16-U8LS.asm (modified) (2 diffs)
- bs3kit/bs3-wc16-U8RS.asm (modified) (1 diff)
- bs3kit/bs3kit-autostubs.kmk (modified) (4 diffs)
- bs3kit/bs3kit-mangling-code-define.h (modified) (4 diffs)
- bs3kit/bs3kit-mangling-code-undef.h (modified) (4 diffs)
- bs3kit/bs3kit-mangling-data.h (modified) (2 diffs)
- bs3kit/bs3kit-template-footer.mac (modified) (1 diff)
- bs3kit/bs3kit-template-header.mac (modified) (4 diffs)
- bs3kit/bs3kit.h (modified) (12 diffs)
- bs3kit/bs3kit.mac (modified) (9 diffs)
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-asm.asm
--- r60609
+++ r60676
@@ -37,5 +37,5 @@
 BS3_BEGIN_DATA16
 BS3_GLOBAL_DATA g_bs3CpuBasic2_ud2_FlatAddr, 4
-        dd      bs3CpuBasic2_ud2 wrt FLAT
+        dd      _bs3CpuBasic2_ud2 wrt FLAT


@@ -46,37 +46,37 @@
 BS3_BEGIN_TEXT16

-BS3_PROC_BEGIN bs3CpuBasic2_ud2, BS3_PB_WITH_US_ALIAS
+BS3_PROC_BEGIN _bs3CpuBasic2_ud2
 .again:
         ud2
         jmp     .again
-BS3_PROC_END bs3CpuBasic2_ud2
+BS3_PROC_END _bs3CpuBasic2_ud2


-BS3_PROC_BEGIN bs3CpuBasic2_Int80, BS3_PB_WITH_US_ALIAS
+BS3_PROC_BEGIN _bs3CpuBasic2_Int80
         int     80h
 .again: ud2
         jmp     .again
-BS3_PROC_END bs3CpuBasic2_Int80
+BS3_PROC_END _bs3CpuBasic2_Int80


-BS3_PROC_BEGIN bs3CpuBasic2_Int81, BS3_PB_WITH_US_ALIAS
+BS3_PROC_BEGIN _bs3CpuBasic2_Int81
         int     81h
 .again: ud2
         jmp     .again
-BS3_PROC_END bs3CpuBasic2_Int81
+BS3_PROC_END _bs3CpuBasic2_Int81


-BS3_PROC_BEGIN bs3CpuBasic2_Int82, BS3_PB_WITH_US_ALIAS
+BS3_PROC_BEGIN _bs3CpuBasic2_Int82
         int     82h
 .again: ud2
         jmp     .again
-BS3_PROC_END bs3CpuBasic2_Int82
+BS3_PROC_END _bs3CpuBasic2_Int82


-BS3_PROC_BEGIN bs3CpuBasic2_Int83, BS3_PB_WITH_US_ALIAS
+BS3_PROC_BEGIN _bs3CpuBasic2_Int83
         int     83h
 .again: ud2
         jmp     .again
-BS3_PROC_END bs3CpuBasic2_Int83
+BS3_PROC_END _bs3CpuBasic2_Int83
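The diff above drops the BS3_PB_WITH_US_ALIAS aliasing and gives the assembly entry points an explicit leading underscore, matching the automatic underscore prefixing added to VBoxBs3ObjConverter.cpp further down. A minimal C-side sketch of how such entry points are then referenced; the declarations mirror the bs3-cpu-basic-2-template.c diff, but the helper itself is purely illustrative and not part of the changeset:

    #include "bs3kit.h"

    /* The assembly labels are _bs3CpuBasic2_ud2 and _bs3CpuBasic2_Int80; the
       C compiler adds the underscore itself, so the plain names are used here. */
    extern FNBS3FAR bs3CpuBasic2_ud2;
    extern FNBS3FAR bs3CpuBasic2_Int80;

    /* Hypothetical helper: take the far address of the UD2 loop, e.g. for
       pointing a test context at it. */
    static FPFNBS3FAR bs3CpuBasic2_ExampleGetUd2Worker(void)
    {
        return (FPFNBS3FAR)&bs3CpuBasic2_ud2;
    }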
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-template.c
r60669 r60676 82 82 uint8_t u1DescType; 83 83 } BS3CB2INVLDESCTYPE; 84 85 typedef struct BS3CB2SIDTSGDT 86 { 87 FPFNBS3FAR fpfnWorker; 88 uint8_t cbInstr; 89 bool fSs; 90 uint8_t bMode; 91 } BS3CB2SIDTSGDT; 84 92 #endif 85 93 … … 89 97 *********************************************************************************************************************************/ 90 98 #ifdef BS3_INSTANTIATING_CMN 91 extern BS3_DECL(void) bs3CpuBasic2_Int80(void); 92 extern BS3_DECL(void) bs3CpuBasic2_Int81(void); 93 extern BS3_DECL(void) bs3CpuBasic2_Int82(void); 94 extern BS3_DECL(void) bs3CpuBasic2_Int83(void); 95 extern BS3_DECL(void) bs3CpuBasic2_ud2(void); 96 #define bs3CpuBasic2_sidt_bx_ud2 BS3_CMN_NM(bs3CpuBasic2_sidt_bx_ud2) 97 extern BS3_DECL(void) bs3CpuBasic2_sidt_bx_ud2(void); 98 #define bs3CpuBasic2_lidt_bx_ud2 BS3_CMN_NM(bs3CpuBasic2_lidt_bx_ud2) 99 extern BS3_DECL(void) bs3CpuBasic2_lidt_bx_ud2(void); 100 #define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr) 101 extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr; 99 extern FNBS3FAR bs3CpuBasic2_Int80; 100 extern FNBS3FAR bs3CpuBasic2_Int81; 101 extern FNBS3FAR bs3CpuBasic2_Int82; 102 extern FNBS3FAR bs3CpuBasic2_Int83; 103 extern FNBS3FAR bs3CpuBasic2_ud2; 104 # define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr) 105 extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr; 106 107 extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16; 108 extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32; 109 extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64; 110 extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16; 111 extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32; 112 extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64; 113 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16; 114 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32; 115 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64; 116 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16; 117 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32; 118 extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64; 119 120 extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16; 121 extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32; 122 extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64; 123 extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16; 124 extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32; 125 extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64; 126 extern FNBS3FAR bs3CpuBasic2_lidt_bx_ud2_c16; 127 extern FNBS3FAR bs3CpuBasic2_lidt_bx_ud2_c32; 128 extern FNBS3FAR bs3CpuBasic2_lidt_bx_ud2_c64; 102 129 #endif 103 130 … … 113 140 # define g_f16BitSys BS3_CMN_NM(g_f16BitSys) 114 141 static bool g_f16BitSys = 1; 142 143 144 static BS3CB2SIDTSGDT const g_aSidtWorkers[] = 145 { 146 { bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 147 // { bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 148 { bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 149 // { bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 150 { bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32 }, 151 // { bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32 }, 152 { bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32 }, 153 // { bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32 }, 154 { bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64 }, 155 { bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64 }, 156 { bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, 
BS3_MODE_CODE_64 }, 157 { bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64 }, 158 }; 159 160 #if 0 161 static BS3CB2SIDTSGDT const g_aSgdtNormal[3] = 162 { 163 { bs3CpuBasic2_sgdt_bx_ud2_c16, bs3CpuBasic2_sgdt_ss_bx_ud2_c16, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 164 { bs3CpuBasic2_sgdt_bx_ud2_c32, bs3CpuBasic2_sgdt_ss_bx_ud2_c32, BS3_MODE_CODE_32 }, 165 { bs3CpuBasic2_sgdt_bx_ud2_c64, bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, BS3_MODE_CODE_64 }, 166 }; 167 168 static BS3CB2SIDTSGDT const g_aSgdtOpSize[3] = 169 { 170 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, bs3CpuBasic2_sgdt_ss_bx_ud2_c16, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86 }, 171 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, bs3CpuBasic2_sgdt_ss_bx_ud2_c32, BS3_MODE_CODE_32 }, 172 { bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, BS3_MODE_CODE_64 }, 173 }; 174 #endif 175 115 176 116 177 /** Table containing invalid CS selector types. */ … … 1305 1366 1306 1367 1307 # define bs3CpuBasic2_sidt_Common BS3_CMN_NM(bs3CpuBasic2_sidt_Common) 1308 BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_Common(void) 1368 /** 1369 * Executes one round of SIDT and SGDT tests using one assembly worker. 1370 * 1371 * This is written with driving everything from the 16-bit or 32-bit worker in 1372 * mind, i.e. not assuming the test bitcount is the same as the current. 1373 */ 1374 # define bs3CpuBasic2_sidt_sgdt_One BS3_CMN_NM(bs3CpuBasic2_sidt_sgdt_One) 1375 BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, 1376 uint8_t const *pabExpected) 1309 1377 { 1310 1378 BS3TRAPFRAME TrapCtx; … … 1315 1383 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */ 1316 1384 uint8_t BS3_FAR *pbBuf = abBuf; 1317 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE( g_bTestMode) ? 2+8 : 2+4;1385 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4; 1318 1386 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286; 1319 1387 uint8_t bFiller; 1320 unsignedoff;1321 1322 g_usBs3TestStep = 0;1388 int off; 1389 uint8_t BS3_FAR *pbTest; 1390 Bs3TestPrintf("bs3CpuBasic2_sidt_sgdt_One: %p bTestMode=%#x\n", pWorker, bTestMode); 1323 1391 1324 1392 /* make sure they're allocated */ … … 1331 1399 /* Create a context, give this routine some more stack space, point the context 1332 1400 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */ 1333 Bs3RegCtxSaveEx(&Ctx, g_bTestMode, 256 /*cbExtraStack*/); 1334 Ctx.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_sidt_bx_ud2); 1335 # if TMPL_BITS == 32 1336 g_uBs3TrapEipHint = Ctx.rip.u32; 1337 # endif 1338 Ctx.rbx.u = BS3_FP_OFF(pbBuf); 1339 # if TMPL_BITS == 16 1340 Ctx.ds = BS3_FP_SEG(pbBuf); 1341 # endif 1401 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/); 1402 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, abBuf); 1403 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker); 1404 if (BS3_MODE_IS_16BIT_SYS(bTestMode)) 1405 g_uBs3TrapEipHint = Ctx.rip.u32; 1342 1406 1343 1407 /* For successful SIDT attempts, we'll stop at the UD2. 
*/ 1344 1408 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx)); 1345 CtxUdExpected.rip.u += 3;1409 CtxUdExpected.rip.u += pWorker->cbInstr; 1346 1410 1347 1411 /* … … 1360 1424 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr)) 1361 1425 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf); 1426 if (Bs3MemCmp(abBuf, pabExpected, cbIdtr) != 0) 1427 Bs3TestFailedF("Mismatch (#1): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, abBuf); 1362 1428 g_usBs3TestStep++; 1363 1429 … … 1378 1444 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL) 1379 1445 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf); 1446 if (Bs3MemCmp(abBuf, pabExpected, cbIdtr) != 0) 1447 Bs3TestFailedF("Mismatch (#2): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, abBuf); 1380 1448 g_usBs3TestStep++; 1381 1449 … … 1386 1454 { 1387 1455 pbBuf = &abBuf[off]; 1388 CtxUdExpected.rbx.u = Ctx.rbx.u = BS3_FP_OFF(pbBuf); 1456 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, &abBuf[off]); 1457 CtxUdExpected.rbx.u = Ctx.rbx.u; 1389 1458 1390 1459 /* First with zero buffer. */ … … 1400 1469 if (f286 && abBuf[off + cbIdtr - 1] != 0xff) 1401 1470 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]); 1471 if (Bs3MemCmp(&abBuf[off], pabExpected, cbIdtr) != 0) 1472 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &abBuf[off]); 1402 1473 g_usBs3TestStep++; 1403 1474 … … 1417 1488 if (f286 && abBuf[off + cbIdtr - 1] != 0xff) 1418 1489 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]); 1490 if (Bs3MemCmp(&abBuf[off], pabExpected, cbIdtr) != 0) 1491 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &abBuf[off]); 1419 1492 g_usBs3TestStep++; 1420 1421 1493 } 1422 1494 pbBuf = abBuf; 1423 CtxUdExpected.rbx.u = Ctx.rbx.u = BS3_FP_OFF(pbBuf); 1495 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, abBuf); 1496 CtxUdExpected.rbx.u = Ctx.rbx.u; 1424 1497 1425 1498 /* … … 1427 1500 * We use BS3_SEL_TEST_PAGE_00 for this 1428 1501 */ 1429 if ( !BS3_MODE_IS_RM_OR_V86( g_bTestMode)1430 && !BS3_MODE_IS_64BIT_CODE( g_bTestMode))1502 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode) 1503 && !BS3_MODE_IS_64BIT_CODE(bTestMode)) 1431 1504 { 1432 1505 uint16_t cbLimit; 1433 uint16_t const uSavedDs = Ctx.ds; 1434 uint32_t uFlatBuf = Bs3SelPtrToFlat(pbBuf); 1506 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf); 1435 1507 Bs3GdteTestPage00 = Bs3Gdte_DATA16; 1436 1508 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf; … … 1455 1527 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", 1456 1528 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf); 1529 if (Bs3MemCmp(&abBuf[off], pabExpected, cbIdtr) != 0) 1530 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &abBuf[off]); 1457 1531 if (f286 && abBuf[off + cbIdtr - 1] != 0xff) 1458 1532 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]); … … 1466 1540 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", 1467 1541 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf); 1542 if (Bs3MemCmp(&abBuf[off], pabExpected, 2) != 0) 1543 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pabExpected, &abBuf[off]); 1468 1544 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller)) 1469 1545 
Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", … … 1506 1582 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected); 1507 1583 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL) 1508 Bs3TestFailedF("Not all bytes touched (# 5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",1584 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", 1509 1585 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf); 1586 if (Bs3MemCmp(&abBuf[off], pabExpected, cbIdtr) != 0) 1587 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &abBuf[off]); 1510 1588 if (f286 && abBuf[off + cbIdtr - 1] != 0xff) 1511 Bs3TestFailedF("286: Top base byte isn't 0xff (# 5): %#x\n", abBuf[off + cbIdtr - 1]);1589 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]); 1512 1590 } 1513 1591 else … … 1520 1598 1521 1599 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller)) 1522 Bs3TestFailedF("Leading bytes touched (# 7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",1600 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", 1523 1601 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf); 1524 1602 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller)) 1525 Bs3TestFailedF("Trailing bytes touched (# 7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",1603 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n", 1526 1604 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf); 1527 1605 … … 1530 1608 } 1531 1609 1532 CtxUdExpected.ds = Ctx.ds = uSavedDs; 1533 CtxUdExpected.rbx.u = Ctx.rbx.u = BS3_FP_OFF(pbBuf); 1610 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf); 1534 1611 } 1535 1612 … … 1537 1614 * Play with the paging. 1538 1615 */ 1539 if (BS3_MODE_IS_PAGED(g_bTestMode)) 1540 { 1541 1542 1616 if ( BS3_MODE_IS_PAGED(bTestMode) 1617 1618 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL) 1619 { 1620 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest); 1621 1622 /* 1623 * Slide the buffer towards the trailing guard page. We'll observe the 1624 * first word being written entirely separately from the 2nd dword/qword. 1625 */ 1626 for (off = X86_PAGE_4K_SIZE - cbIdtr - 4; off < X86_PAGE_4K_SIZE + 4; off++) 1627 { 1628 Bs3MemSet(&pbTest[X86_PAGE_4K_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2); 1629 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, &pbTest[off]); 1630 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx); 1631 if (off + cbIdtr <= X86_PAGE_4K_SIZE) 1632 { 1633 CtxUdExpected.rbx = Ctx.rbx; 1634 CtxUdExpected.ds = Ctx.ds; 1635 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected); 1636 if (Bs3MemCmp(&pbTest[off], pabExpected, cbIdtr) != 0) 1637 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &pbTest[off]); 1638 } 1639 else 1640 { 1641 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl ? 
X86_TRAP_PF_US : 0), 1642 uFlatTest + RT_MAX(off, X86_PAGE_4K_SIZE)); 1643 if ( off <= X86_PAGE_4K_SIZE - 2 1644 && Bs3MemCmp(&pbTest[off], pabExpected, 2) != 0) 1645 Bs3TestPrintf("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n", 1646 pabExpected, &pbTest[off], off); 1647 if ( off < X86_PAGE_4K_SIZE - 2 1648 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_4K_SIZE - off - 2, bFiller)) 1649 Bs3TestPrintf("Wrote partial base on #PF (#10): Expected %.*Rhxs, got %.*Rhxs; off=%#x\n", 1650 X86_PAGE_4K_SIZE - off - 2, pabExpected, X86_PAGE_4K_SIZE - off - 2, &pbTest[off + 2], off); 1651 if (off == X86_PAGE_4K_SIZE - 1 && pbTest[off] != bFiller) 1652 Bs3TestPrintf("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]); 1653 } 1654 } 1655 1656 /* 1657 * Now, do it the other way around. It should look normal now since writing 1658 * the limit will #PF first and nothing should be written. 1659 */ 1660 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--) 1661 { 1662 Bs3MemSet(pbTest, bFiller, 32); 1663 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, &pbTest[off]); 1664 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx); 1665 if (off >= 0) 1666 { 1667 CtxUdExpected.rbx = Ctx.rbx; 1668 CtxUdExpected.ds = Ctx.ds; 1669 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected); 1670 if (Bs3MemCmp(&pbTest[off], pabExpected, cbIdtr) != 0) 1671 Bs3TestFailedF("Mismatch (#10): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &pbTest[off]); 1672 } 1673 else 1674 { 1675 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl ? X86_TRAP_PF_US : 0), 1676 uFlatTest + RT_MAX(off, X86_PAGE_4K_SIZE)); 1677 if ( -off < cbIdtr 1678 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller)) 1679 Bs3TestPrintf("Wrote partial content on #PF (#11): bFiller=%#x, found %.*Rhxs; off=%d\n", 1680 bFiller, cbIdtr + off, pbTest, off); 1681 } 1682 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller)) 1683 Bs3TestPrintf("Wrote beyond expected area (#12): bFiller=%#x, found %.16Rhxs; off=%d\n", 1684 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off); 1685 } 1686 1687 Bs3MemGuardedTestPageFree(pbTest); 1543 1688 } 1544 1689 1545 1690 } 1691 1692 # define bs3CpuBasic2_sidt_sgdt_Common BS3_CMN_NM(bs3CpuBasic2_sidt_sgdt_Common) 1693 BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers, 1694 uint8_t const *pabExpected) 1695 { 1696 unsigned idx; 1697 unsigned iStep = 0; 1698 1699 for (idx = 0; idx < cWorkers; idx++) 1700 if (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK)) 1701 { 1702 g_usBs3TestStep = iStep; 1703 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, pabExpected); 1704 iStep += 1000; 1705 } 1706 } 1707 1546 1708 1547 1709 # if ARCH_BITS != 64 … … 1794 1956 { 1795 1957 //if (bMode == BS3_MODE_PE16_V86) 1796 { 1958 if (bMode & BS3_MODE_CODE_V86) 1959 { 1960 union 1961 { 1962 RTIDTR Idtr; 1963 uint8_t ab[16]; 1964 } Expected; 1965 1797 1966 g_pszTestMode = TMPL_NM(g_szBs3ModeName); 1798 1967 g_bTestMode = bMode; … … 1804 1973 * Pass to common worker which is only compiled once per mode. 1805 1974 */ 1806 bs3CpuBasic2_sidt_Common(); 1975 Bs3MemZero(&Expected, sizeof(Expected)); 1976 ASMGetIDTR(&Expected.Idtr); 1977 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab); 1807 1978 1808 1979 /* -
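The reworked test above drives SIDT/SGDT through a table of per-mode assembly workers and checks the memory image they store: a 16-bit limit followed by a 32- or 64-bit base (the cbIdtr = 2+4 / 2+8 computation), with the guard-page runs observing that the limit word can be written even when storing the base faults. A packed-struct sketch of that memory image; the type and field names are illustrative and not taken from the changeset:

    #include <stdint.h>

    #pragma pack(1)
    typedef struct EXAMPLEXDTR32
    {
        uint16_t cbLimit;   /* bytes 0-1: table limit, written first           */
        uint32_t uBase;     /* bytes 2-5: base address in 16-bit/32-bit code   */
    } EXAMPLEXDTR32;        /* 2+4 bytes, i.e. cbIdtr outside 64-bit code      */

    typedef struct EXAMPLEXDTR64
    {
        uint16_t cbLimit;   /* bytes 0-1 */
        uint64_t uBase;     /* bytes 2-9: 64-bit base in long mode             */
    } EXAMPLEXDTR64;        /* 2+8 bytes, i.e. cbIdtr for 64-bit code          */
    #pragma pack()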
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-template.mac
r60657 r60676 92 92 %ifdef BS3_INSTANTIATING_CMN 93 93 94 ; 95 ; SIDT 96 ; 94 97 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_bx_ud2, BS3_PBC_NEAR 95 98 sidt [xBX] … … 99 102 BS3_PROC_END_CMN bs3CpuBasic2_sidt_bx_ud2 100 103 104 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_opsize_bx_ud2, BS3_PBC_NEAR 105 db X86_OP_PRF_SIZE_OP 106 sidt [xBX] 107 .again: ud2 108 jmp .again 109 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sidt_opsize_bx_ud2) == 4) 110 BS3_PROC_END_CMN bs3CpuBasic2_sidt_opsize_bx_ud2 101 111 112 %if TMPL_BITS == 64 113 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_rexw_bx_ud2, BS3_PBC_NEAR 114 db X86_OP_REX_W 115 sidt [xBX] 116 .again: ud2 117 jmp .again 118 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sidt_rexw_bx_ud2) == 4) 119 BS3_PROC_END_CMN bs3CpuBasic2_sidt_rexw_bx_ud2 120 121 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_opsize_rexw_bx_ud2, BS3_PBC_NEAR 122 db X86_OP_PRF_SIZE_OP 123 db X86_OP_REX_W 124 sidt [xBX] 125 .again: ud2 126 jmp .again 127 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sidt_opsize_rexw_bx_ud2) == 5) 128 BS3_PROC_END_CMN bs3CpuBasic2_sidt_opsize_rexw_bx_ud2 129 %endif 130 131 %if TMPL_BITS != 64 132 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_ss_bx_ud2, BS3_PBC_NEAR 133 sidt [ss:xBX] 134 .again: ud2 135 jmp .again 136 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sidt_ss_bx_ud2) == 4) 137 BS3_PROC_END_CMN bs3CpuBasic2_sidt_ss_bx_ud2 138 139 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sidt_opsize_ss_bx_ud2, BS3_PBC_NEAR 140 db X86_OP_PRF_SIZE_OP 141 sidt [ss:xBX] 142 .again: ud2 143 jmp .again 144 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sidt_opsize_ss_bx_ud2) == 5) 145 BS3_PROC_END_CMN bs3CpuBasic2_sidt_opsize_ss_bx_ud2 146 %endif 147 148 ; 149 ; SGDT 150 ; 151 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sgdt_bx_ud2, BS3_PBC_NEAR 152 sgdt [xBX] 153 .again: ud2 154 jmp .again 155 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sgdt_bx_ud2) == 3) 156 BS3_PROC_END_CMN bs3CpuBasic2_sgdt_bx_ud2 157 158 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sgdt_opsize_bx_ud2, BS3_PBC_NEAR 159 db X86_OP_PRF_SIZE_OP 160 sgdt [xBX] 161 .again: ud2 162 jmp .again 163 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sgdt_opsize_bx_ud2) == 4) 164 BS3_PROC_END_CMN bs3CpuBasic2_sgdt_opsize_bx_ud2 165 166 %if TMPL_BITS == 64 167 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sgdt_rexw_bx_ud2, BS3_PBC_NEAR 168 db X86_OP_REX_W 169 sgdt [xBX] 170 .again: ud2 171 jmp .again 172 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sgdt_rexw_bx_ud2) == 4) 173 BS3_PROC_END_CMN bs3CpuBasic2_sgdt_rexw_bx_ud2 174 %endif 175 176 %if TMPL_BITS != 64 177 BS3_PROC_BEGIN_CMN bs3CpuBasic2_sgdt_ss_bx_ud2, BS3_PBC_NEAR 178 sgdt [ss:xBX] 179 .again: ud2 180 jmp .again 181 AssertCompile(.again - BS3_CMN_NM(bs3CpuBasic2_sgdt_ss_bx_ud2) == 4) 182 BS3_PROC_END_CMN bs3CpuBasic2_sgdt_ss_bx_ud2 183 %endif 184 185 186 ; 187 ; 188 ; 102 189 BS3_PROC_BEGIN_CMN bs3CpuBasic2_lidt_bx_ud2, BS3_PBC_NEAR 103 190 lidt [xBX] -
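The new .mac workers above pair with the g_aSidtWorkers table in the C template; each AssertCompile pins down the instruction length that the table's cbInstr field must carry (plain sidt [xBX] is 3 bytes, and each operand-size, REX.W or segment-override prefix adds one byte). A small sketch of that relationship, purely illustrative and not part of the changeset:

    #include <stdbool.h>

    /* Hypothetical helper: computes the length the AssertCompile statements
       above assert for each worker variant. */
    static unsigned ExampleSidtWorkerLength(bool fOpSizePrefix, bool fRexW, bool fSsOverride)
    {
        unsigned cb = 3;        /* 0F 01 /1 with a single mod/rm byte        */
        cb += fOpSizePrefix;    /* 66h (X86_OP_PRF_SIZE_OP)                  */
        cb += fRexW;            /* 48h (X86_OP_REX_W), 64-bit workers only   */
        cb += fSsOverride;      /* 36h, the ss: variants                     */
        return cb;
    }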
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/Makefile.kmk
--- r60657
+++ r60676
@@ -72,4 +72,5 @@
 	bs3-cmn-StrCpy.c \
 	bs3-cmn-MemChr.asm \
+	bs3-cmn-MemCmp.asm \
 	bs3-cmn-MemCpy.c \
 	bs3-cmn-MemPCpy.c \

@@ -80,4 +81,5 @@
 	bs3-cmn-MemAllocZ.c \
 	bs3-cmn-MemFree.c \
+	bs3-cmn-MemGuardedTestPage.c \
 	bs3-cmn-PagingData.c \
 	bs3-cmn-PagingInitRootForPP.c \

@@ -91,4 +93,7 @@
 	bs3-cmn-RegCtxSave.asm \
 	bs3-cmn-RegCtxSaveEx.asm \
+	bs3-cmn-RegCtxSetGrpSegFromCurPtr.c \
+	bs3-cmn-RegCtxSetGrpSegFromFlat.c \
+	bs3-cmn-RegCtxSetRipCsFromLnkPtr.c \
 	bs3-cmn-SelFar32ToFlat32.c \
 	bs3-cmn-SelFar32ToFlat32NoClobber.asm \

@@ -193,4 +198,5 @@
 	bs3-wc16-U4D.asm \
 	bs3-wc16-I4D.asm \
+	bs3-c16-SwitchFromV86To16BitAndCallC.asm \
 	bs3-c16-Trap16Generic.asm \
 	bs3-c16-TrapRmV86Generic.asm \
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/VBoxBs3ObjConverter.cpp
r60539 r60676 257 257 typedef OMFSYMBOL *POMFSYMBOL; 258 258 259 /** OMF Writer LNAME lookup record. */ 260 typedef struct OMFWRLNAME 261 { 262 /** Pointer to the next entry with the name hash. */ 263 struct OMFWRLNAME *pNext; 264 /** The LNAMES index number. */ 265 uint16_t idxName; 266 /** The name length. */ 267 uint8_t cchName; 268 /** The name (variable size). */ 269 char szName[1]; 270 } OMFWRLNAME; 271 /** Pointer to the a OMF writer LNAME lookup record. */ 272 typedef OMFWRLNAME *POMFWRLNAME; 273 259 274 /** 260 275 * OMF converter & writer instance. … … 312 327 /** The EXTDEF index of the __ImageBase symbol. */ 313 328 uint16_t idxExtImageBase; 329 330 /** LNAME lookup hash table. To avoid too many duplicates. */ 331 POMFWRLNAME apNameLookup[63]; 314 332 } OMFWRITE; 315 333 /** Pointer to an OMF writer. */ … … 353 371 { 354 372 free(pThis->paSymbols); 373 355 374 for (uint32_t i = 0; i < pThis->cSegments; i++) 356 375 if (pThis->paSegments[i].pszName) 357 376 free(pThis->paSegments[i].pszName); 377 358 378 free(pThis->paSegments); 379 380 uint32_t i = RT_ELEMENTS(pThis->apNameLookup); 381 while (i-- > 0) 382 { 383 POMFWRLNAME pNext = pThis->apNameLookup[i]; 384 pThis->apNameLookup[i] = NULL; 385 while (pNext) 386 { 387 POMFWRLNAME pFree = pNext; 388 pNext = pNext->pNext; 389 free(pFree); 390 } 391 } 392 359 393 free(pThis); 360 394 } … … 424 458 } 425 459 426 static bool omfWriter_RecAddStringN (POMFWRITER pThis, const char *pchString, size_t cchString)460 static bool omfWriter_RecAddStringNEx(POMFWRITER pThis, const char *pchString, size_t cchString, bool fPrependUnderscore) 427 461 { 428 462 if (cchString < 256) 429 463 { 430 return omfWriter_RecAddU8(pThis, (uint8_t)cchString) 464 return omfWriter_RecAddU8(pThis, (uint8_t)cchString + fPrependUnderscore) 465 && (!fPrependUnderscore || omfWriter_RecAddU8(pThis, '_')) 431 466 && omfWriter_RecAddBytes(pThis, pchString, cchString); 432 467 } … … 435 470 } 436 471 472 static bool omfWriter_RecAddStringN(POMFWRITER pThis, const char *pchString, size_t cchString) 473 { 474 return omfWriter_RecAddStringNEx(pThis, pchString, cchString, false /*fPrependUnderscore*/); 475 } 476 437 477 static bool omfWriter_RecAddString(POMFWRITER pThis, const char *pszString) 438 478 { 439 return omfWriter_RecAddStringN (pThis, pszString, strlen(pszString));479 return omfWriter_RecAddStringNEx(pThis, pszString, strlen(pszString), false /*fPrependUnderscore*/); 440 480 } 441 481 … … 471 511 } 472 512 513 514 /** 515 * Simple stupid string hashing function (for LNAMES) 516 * @returns 8-bit hash. 517 * @param pchName The string. 518 * @param cchName The string length. 519 */ 520 DECLINLINE(uint8_t) omfWriter_HashStrU8(const char *pchName, size_t cchName) 521 { 522 if (cchName) 523 return (uint8_t)(cchName + pchName[cchName >> 1]); 524 return 0; 525 } 526 527 /** 528 * Looks up a LNAME. 529 * 530 * @returns Index (0..32K) if found, UINT16_MAX if not found. 531 * @param pThis The OMF writer. 532 * @param pchName The name to look up. 533 * @param cchName The length of the name. 
534 */ 535 static uint16_t omfWriter_LNamesLookupN(POMFWRITER pThis, const char *pchName, size_t cchName) 536 { 537 uint8_t uHash = omfWriter_HashStrU8(pchName, cchName); 538 uHash %= RT_ELEMENTS(pThis->apNameLookup); 539 540 POMFWRLNAME pCur = pThis->apNameLookup[uHash]; 541 while (pCur) 542 { 543 if ( pCur->cchName == cchName 544 && memcmp(pCur->szName, pchName, cchName) == 0) 545 return pCur->idxName; 546 pCur = pCur->pNext; 547 } 548 549 return UINT16_MAX; 550 } 551 552 /** 553 * Add a LNAME lookup record. 554 * 555 * @returns success indicator. 556 * @param pThis The OMF writer. 557 * @param pchName The name to look up. 558 * @param cchName The length of the name. 559 * @param idxName The name index. 560 */ 561 static bool omfWriter_LNamesAddLookup(POMFWRITER pThis, const char *pchName, size_t cchName, uint16_t idxName) 562 { 563 POMFWRLNAME pCur = (POMFWRLNAME)malloc(sizeof(*pCur) + cchName); 564 if (!pCur) 565 return error("???", "Out of memory!\n"); 566 567 pCur->idxName = idxName; 568 pCur->cchName = (uint8_t)cchName; 569 memcpy(pCur->szName, pchName, cchName); 570 pCur->szName[cchName] = '\0'; 571 572 uint8_t uHash = omfWriter_HashStrU8(pchName, cchName); 573 uHash %= RT_ELEMENTS(pThis->apNameLookup); 574 pCur->pNext = pThis->apNameLookup[uHash]; 575 pThis->apNameLookup[uHash] = pCur; 576 577 return true; 578 } 579 580 473 581 static bool omfWriter_LNamesAddN(POMFWRITER pThis, const char *pchName, size_t cchName, uint16_t *pidxName) 474 582 { 583 /* See if we've already got that name in the list. */ 584 uint16_t idxName; 585 if (pidxName) /* If pidxName is NULL, we assume the caller migth just be passing stuff thru. */ 586 { 587 idxName = omfWriter_LNamesLookupN(pThis, pchName, cchName); 588 if (idxName != UINT16_MAX) 589 { 590 *pidxName = idxName; 591 return true; 592 } 593 } 594 475 595 /* split? */ 476 596 if (pThis->cbRec + 1 /*len*/ + cchName + 1 /*crc*/ > OMF_MAX_RECORD_PAYLOAD) … … 483 603 } 484 604 605 idxName = pThis->idxNextName++; 485 606 if (pidxName) 486 *pidxName = pThis->idxNextName;487 pThis->idxNextName++;488 return omfWriter_RecAddStringN(pThis, pchName, cchName);607 *pidxName = idxName; 608 return omfWriter_RecAddStringN(pThis, pchName, cchName) 609 && omfWriter_LNamesAddLookup(pThis, pchName, cchName, idxName); 489 610 } 490 611 … … 548 669 } 549 670 550 static bool omfWriter_PubDefAddN(POMFWRITER pThis, uint32_t uValue, const char *pchString, size_t cchString) 671 static bool omfWriter_PubDefAddN(POMFWRITER pThis, uint32_t uValue, const char *pchString, size_t cchString, 672 bool fPrependUnderscore) 551 673 { 552 674 /* Split? 
*/ 553 if (pThis->cbRec + 1 + cchString + 4 + 1 + 1 > OMF_MAX_RECORD_PAYLOAD)675 if (pThis->cbRec + 1 + cchString + 4 + 1 + 1 + fPrependUnderscore > OMF_MAX_RECORD_PAYLOAD) 554 676 { 555 677 if (cchString >= 256) … … 568 690 } 569 691 570 return omfWriter_RecAddStringN (pThis, pchString, cchString)692 return omfWriter_RecAddStringNEx(pThis, pchString, cchString, fPrependUnderscore) 571 693 && omfWriter_RecAddU32(pThis, uValue) 572 694 && omfWriter_RecAddIdx(pThis, 0); /* type */ 573 695 } 574 696 575 static bool omfWriter_PubDefAdd(POMFWRITER pThis, uint32_t uValue, const char *pszString )576 { 577 return omfWriter_PubDefAddN(pThis, uValue, pszString, strlen(pszString) );697 static bool omfWriter_PubDefAdd(POMFWRITER pThis, uint32_t uValue, const char *pszString, bool fPrependUnderscore) 698 { 699 return omfWriter_PubDefAddN(pThis, uValue, pszString, strlen(pszString), fPrependUnderscore); 578 700 } 579 701 … … 595 717 * EXTDEF - Add an entry, split record if necessary. 596 718 */ 597 static bool omfWriter_ExtDefAddN(POMFWRITER pThis, const char *pchString, size_t cchString, uint16_t idxType) 719 static bool omfWriter_ExtDefAddN(POMFWRITER pThis, const char *pchString, size_t cchString, uint16_t idxType, 720 bool fPrependUnderscore) 598 721 { 599 722 /* Split? */ 600 if (pThis->cbRec + 1 + cchString + 1 + 1 > OMF_MAX_RECORD_PAYLOAD)723 if (pThis->cbRec + 1 + cchString + 1 + 1 + fPrependUnderscore > OMF_MAX_RECORD_PAYLOAD) 601 724 { 602 725 if (cchString >= 256) … … 608 731 } 609 732 610 return omfWriter_RecAddStringN (pThis, pchString, cchString)733 return omfWriter_RecAddStringNEx(pThis, pchString, cchString, fPrependUnderscore) 611 734 && omfWriter_RecAddIdx(pThis, idxType); /* type */ 612 735 } … … 615 738 * EXTDEF - Add an entry, split record if necessary. 616 739 */ 617 static bool omfWriter_ExtDefAdd(POMFWRITER pThis, const char *pszString )618 { 619 return omfWriter_ExtDefAddN(pThis, pszString, strlen(pszString), 0 );740 static bool omfWriter_ExtDefAdd(POMFWRITER pThis, const char *pszString, bool fPrependUnderscore) 741 { 742 return omfWriter_ExtDefAddN(pThis, pszString, strlen(pszString), 0, fPrependUnderscore); 620 743 } 621 744 … … 1506 1629 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 1507 1630 { 1631 /* Underscore prefix all names not already underscored/mangled. */ 1508 1632 const char *pszName = &pElfStuff->pchStrTab[paSymbols[iSym].st_name]; 1509 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName ))1633 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName, pszName[0] != '_')) 1510 1634 return false; 1511 1512 /* If the symbol doesn't start with an underscore and is a _c64 or _lm64 symbol,1513 add an underscore prefixed alias to ease access from 16-bit and 32-bit code. */1514 size_t cchName = strlen(pszName);1515 if ( *pszName != '_'1516 && ( (cchName > 4 && strcmp(&pszName[cchName - 4], "_c64") == 0)1517 || (cchName > 5 && strcmp(&pszName[cchName - 5], "_lm64") == 0) ) )1518 {1519 char szCdeclName[512];1520 if (cchName > sizeof(szCdeclName) - 2)1521 cchName = sizeof(szCdeclName) - 2;1522 szCdeclName[0] = '_';1523 memcpy(&szCdeclName[1], pszName, cchName);1524 szCdeclName[cchName + 1] = '\0';1525 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, szCdeclName))1526 return false;1527 }1528 1529 1635 pThis->paSymbols[iSym].idx = idxPubDef++; 1530 1636 } … … 1542 1648 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 1543 1649 { 1650 /* Underscore prefix all names not already underscored/mangled. 
*/ 1544 1651 const char *pszName = &pElfStuff->pchStrTab[paSymbols[iSym].st_name]; 1545 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName ))1652 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName, pszName[0] != '_')) 1546 1653 return false; 1547 1654 pThis->paSymbols[iSym].idx = idxPubDef++; … … 1560 1667 if (pThis->paSymbols[iSym].enmType == OMFSYMTYPE_EXTDEF) 1561 1668 { 1669 /* Underscore prefix all names not already underscored/mangled. */ 1562 1670 const char *pszName = &pElfStuff->pchStrTab[paSymbols[iSym].st_name]; 1563 if (!omfWriter_ExtDefAdd(pThis, pszName ))1671 if (!omfWriter_ExtDefAdd(pThis, pszName, *pszName != '_')) 1564 1672 return false; 1565 1673 pThis->paSymbols[iSym].idx = idxExtDef++; … … 2337 2445 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 2338 2446 { 2447 /* Underscore prefix all symbols not already underscored or mangled. */ 2339 2448 const char *pszName = coffGetSymbolName(&paSymbols[iSym], pchStrTab, cbStrTab, szShort); 2340 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].Value, pszName ))2449 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].Value, pszName, pszName[0] != '_' && pszName[0] != '?')) 2341 2450 return false; 2342 2343 /* If the symbol doesn't start with an underscore and is a _c64 or _lm64 symbol,2344 add an underscore prefixed alias to ease access from 16-bit and 32-bit code. */2345 size_t cchName = strlen(pszName);2346 if ( *pszName != '_'2347 && ( (cchName > 4 && strcmp(&pszName[cchName - 4], "_c64") == 0)2348 || (cchName > 5 && strcmp(&pszName[cchName - 5], "_lm64") == 0) ) )2349 {2350 char szCdeclName[512];2351 if (cchName > sizeof(szCdeclName) - 2)2352 cchName = sizeof(szCdeclName) - 2;2353 szCdeclName[0] = '_';2354 memcpy(&szCdeclName[1], pszName, cchName);2355 szCdeclName[cchName + 1] = '\0';2356 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].Value, szCdeclName))2357 return false;2358 }2359 2360 2451 pThis->paSymbols[iSym].idx = idxPubDef++; 2361 2452 } … … 2373 2464 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 2374 2465 { 2375 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].Value, 2376 coffGetSymbolName(&paSymbols[iSym], pchStrTab, cbStrTab, szShort)) ) 2466 /* Underscore prefix all symbols not already underscored or mangled. */ 2467 const char *pszName = coffGetSymbolName(&paSymbols[iSym], pchStrTab, cbStrTab, szShort); 2468 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].Value, pszName, pszName[0] != '_' && pszName[0] != '?') ) 2377 2469 return false; 2378 2470 pThis->paSymbols[iSym].idx = idxPubDef++; … … 2391 2483 if (pThis->paSymbols[iSym].enmType == OMFSYMTYPE_EXTDEF) 2392 2484 { 2393 if (!omfWriter_ExtDefAdd(pThis, coffGetSymbolName(&paSymbols[iSym], pchStrTab, cbStrTab, szShort))) 2485 /* Underscore prefix all symbols not already underscored or mangled. */ 2486 const char *pszName = coffGetSymbolName(&paSymbols[iSym], pchStrTab, cbStrTab, szShort); 2487 if (!omfWriter_ExtDefAdd(pThis, pszName, pszName[0] != '_' && pszName[0] != '?')) 2394 2488 return false; 2395 2489 pThis->paSymbols[iSym].idx = idxExtDef++; … … 2400 2494 if (iSymImageBase != UINT32_MAX) 2401 2495 pThis->idxExtImageBase = pThis->paSymbols[iSymImageBase].idx; 2402 else if (omfWriter_ExtDefAdd(pThis, "__ImageBase" ))2496 else if (omfWriter_ExtDefAdd(pThis, "__ImageBase", false /*fPrependUnderscore*/)) 2403 2497 pThis->idxExtImageBase = idxExtDef; 2404 2498 else … … 2724 2818 }; 2725 2819 2726 /** Macro for getting the size of a AMD64 ELFrelocation. 
*/2727 #define ELF_AMD64_RELOC_SIZE(a_Type) ( (a_Type) < RT_ELEMENTS(g_acbElfAmd64RelTypes) ? g_acbElfAmd64RelTypes[(a_Type)] : 1)2728 2729 2730 typedef struct ELFDETAILS2820 /** Macro for getting the size of a AMD64 Mach-O relocation. */ 2821 #define MACHO_AMD64_RELOC_SIZE(a_Type) ( (a_Type) < RT_ELEMENTS(g_acbMachOAmd64RelTypes) ? g_acbMachOAmd64RelTypes[(a_Type)] : 1) 2822 2823 2824 typedef struct MACHODETAILS 2731 2825 { 2732 2826 /** The ELF header. */ … … 2752 2846 size_t cbStrTab; 2753 2847 2754 } ELFDETAILS;2755 typedef ELFDETAILS *PELFDETAILS;2756 typedef ELFDETAILS const *PCELFDETAILS;2757 2758 2759 static bool validate Elf(const char *pszFile, uint8_t const *pbFile, size_t cbFile, PELFDETAILS pElfStuff)2760 { 2761 /* 2762 * Initialize the ELFdetails structure.2763 */ 2764 memset(p ElfStuff, 0, sizeof(*pElfStuff));2765 p ElfStuff->iSymSh = UINT16_MAX;2766 p ElfStuff->iStrSh = UINT16_MAX;2848 } MACHODETAILS; 2849 typedef MACHODETAILS *PMACHODETAILS; 2850 typedef MACHODETAILS const *PCMACHODETAILS; 2851 2852 2853 static bool validateMacho(const char *pszFile, uint8_t const *pbFile, size_t cbFile, PMACHODETAILS pMachOStuff) 2854 { 2855 /* 2856 * Initialize the Mach-O details structure. 2857 */ 2858 memset(pMachOStuff, 0, sizeof(*pMachOStuff)); 2859 pMachOStuff->iSymSh = UINT16_MAX; 2860 pMachOStuff->iStrSh = UINT16_MAX; 2767 2861 2768 2862 /* … … 2770 2864 */ 2771 2865 Elf64_Ehdr const *pEhdr = (Elf64_Ehdr const *)pbFile; 2772 p ElfStuff->pEhdr = pEhdr;2866 pMachOStuff->pEhdr = pEhdr; 2773 2867 if ( pEhdr->e_ident[EI_CLASS] != ELFCLASS64 2774 2868 || pEhdr->e_ident[EI_DATA] != ELFDATA2LSB … … 2797 2891 */ 2798 2892 Elf64_Shdr const *paShdrs = (Elf64_Shdr const *)&pbFile[pEhdr->e_shoff]; 2799 p ElfStuff->paShdrs = paShdrs;2893 pMachOStuff->paShdrs = paShdrs; 2800 2894 2801 2895 Elf64_Xword const cbShStrTab = paShdrs[pEhdr->e_shstrndx].sh_size; … … 2807 2901 paShdrs[pEhdr->e_shstrndx].sh_offset, paShdrs[pEhdr->e_shstrndx].sh_size, (Elf64_Xword)cbFile); 2808 2902 const char *pchShStrTab = (const char *)&pbFile[paShdrs[pEhdr->e_shstrndx].sh_offset]; 2809 p ElfStuff->pchShStrTab = pchShStrTab;2903 pMachOStuff->pchShStrTab = pchShStrTab; 2810 2904 2811 2905 /* … … 2911 3005 i, pszShNm, paShdrs[i].sh_size, cSymbols); 2912 3006 2913 if (p ElfStuff->iSymSh == UINT16_MAX)2914 { 2915 p ElfStuff->iSymSh = (uint16_t)i;2916 p ElfStuff->paSymbols = (Elf64_Sym const *)&pbFile[paShdrs[i].sh_offset];2917 p ElfStuff->cSymbols = cSymbols;3007 if (pMachOStuff->iSymSh == UINT16_MAX) 3008 { 3009 pMachOStuff->iSymSh = (uint16_t)i; 3010 pMachOStuff->paSymbols = (Elf64_Sym const *)&pbFile[paShdrs[i].sh_offset]; 3011 pMachOStuff->cSymbols = cSymbols; 2918 3012 2919 3013 if (paShdrs[i].sh_link != 0) … … 2921 3015 /* Note! The symbol string table section header may not have been validated yet! 
*/ 2922 3016 Elf64_Shdr const *pStrTabShdr = &paShdrs[paShdrs[i].sh_link]; 2923 p ElfStuff->iStrSh = paShdrs[i].sh_link;2924 p ElfStuff->pchStrTab = (const char *)&pbFile[pStrTabShdr->sh_offset];2925 p ElfStuff->cbStrTab = (size_t)pStrTabShdr->sh_size;3017 pMachOStuff->iStrSh = paShdrs[i].sh_link; 3018 pMachOStuff->pchStrTab = (const char *)&pbFile[pStrTabShdr->sh_offset]; 3019 pMachOStuff->cbStrTab = (size_t)pStrTabShdr->sh_size; 2926 3020 } 2927 3021 else … … 2931 3025 else 2932 3026 fRet = error(pszFile, "Section #%u '%s': Found additonal symbol table, previous in #%u\n", 2933 i, pszShNm, p ElfStuff->iSymSh);3027 i, pszShNm, pMachOStuff->iSymSh); 2934 3028 } 2935 3029 } … … 2937 3031 } 2938 3032 2939 static bool convert ElfSectionsToSegDefsAndGrpDefs(POMFWRITER pThis, PCELFDETAILS pElfStuff)3033 static bool convertMachoSectionsToSegDefsAndGrpDefs(POMFWRITER pThis, PCMACHODETAILS pMachOStuff) 2940 3034 { 2941 3035 /* … … 2954 3048 2955 3049 bool fHaveData = false; 2956 Elf64_Shdr const *pShdr = &p ElfStuff->paShdrs[1];2957 Elf64_Half const cSections = p ElfStuff->pEhdr->e_shnum;3050 Elf64_Shdr const *pShdr = &pMachOStuff->paShdrs[1]; 3051 Elf64_Half const cSections = pMachOStuff->pEhdr->e_shnum; 2958 3052 for (Elf64_Half i = 1; i < cSections; i++, pShdr++) 2959 3053 { 2960 const char *pszName = &p ElfStuff->pchShStrTab[pShdr->sh_name];3054 const char *pszName = &pMachOStuff->pchShStrTab[pShdr->sh_name]; 2961 3055 if (*pszName == '\0') 2962 3056 return error(pThis->pszSrc, "Section #%u has an empty name!\n", i); … … 3048 3142 */ 3049 3143 uint16_t iSegDef = 1; /* Start counting at 1. */ 3050 pShdr = &p ElfStuff->paShdrs[1];3144 pShdr = &pMachOStuff->paShdrs[1]; 3051 3145 for (Elf64_Half i = 1; i < cSections; i++, pShdr++) 3052 3146 { … … 3140 3234 } 3141 3235 3142 static bool convert ElfSymbolsToPubDefsAndExtDefs(POMFWRITER pThis, PCELFDETAILS pElfStuff)3143 { 3144 if (!p ElfStuff->cSymbols)3236 static bool convertMachOSymbolsToPubDefsAndExtDefs(POMFWRITER pThis, PCMACHODETAILS pMachOStuff) 3237 { 3238 if (!pMachOStuff->cSymbols) 3145 3239 return true; 3146 3240 … … 3154 3248 pThis->paSegments[iSeg].cPubDefs = 0; 3155 3249 3156 uint32_t const cSections = p ElfStuff->pEhdr->e_shnum;3157 uint32_t const cSymbols = p ElfStuff->cSymbols;3158 Elf64_Sym const * const paSymbols = p ElfStuff->paSymbols;3250 uint32_t const cSections = pMachOStuff->pEhdr->e_shnum; 3251 uint32_t const cSymbols = pMachOStuff->cSymbols; 3252 Elf64_Sym const * const paSymbols = pMachOStuff->paSymbols; 3159 3253 for (uint32_t iSym = 0; iSym < cSymbols; iSym++) 3160 3254 { 3161 3255 const uint8_t bBind = ELF64_ST_BIND(paSymbols[iSym].st_info); 3162 3256 const uint8_t bType = ELF64_ST_TYPE(paSymbols[iSym].st_info); 3163 const char *pszSymName = &p ElfStuff->pchStrTab[paSymbols[iSym].st_name];3257 const char *pszSymName = &pMachOStuff->pchStrTab[paSymbols[iSym].st_name]; 3164 3258 if ( *pszSymName == '\0' 3165 3259 && bType == STT_SECTION 3166 3260 && paSymbols[iSym].st_shndx < cSections) 3167 pszSymName = &p ElfStuff->pchShStrTab[pElfStuff->paShdrs[paSymbols[iSym].st_shndx].sh_name];3261 pszSymName = &pMachOStuff->pchShStrTab[pMachOStuff->paShdrs[paSymbols[iSym].st_shndx].sh_name]; 3168 3262 3169 3263 pThis->paSymbols[iSym].enmType = OMFSYMTYPE_IGNORED; … … 3230 3324 /* 3231 3325 * Emit the PUBDEFs the first time around (see order of records in TIS spec). 3326 * Note! We expect the os x compiler to always underscore symbols, so unlike the 3327 * other 64-bit converters we don't need to check for underscores and add them. 
3232 3328 */ 3233 3329 uint16_t idxPubDef = 1; … … 3244 3340 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 3245 3341 { 3246 const char *pszName = &p ElfStuff->pchStrTab[paSymbols[iSym].st_name];3247 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName ))3342 const char *pszName = &pMachOStuff->pchStrTab[paSymbols[iSym].st_name]; 3343 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName, false /*fPrependUnderscore*/)) 3248 3344 return false; 3249 3250 /* If the symbol doesn't start with an underscore and is a _c64 or _lm64 symbol,3251 add an underscore prefixed alias to ease access from 16-bit and 32-bit code. */3252 size_t cchName = strlen(pszName);3253 if ( *pszName != '_'3254 && ( (cchName > 4 && strcmp(&pszName[cchName - 4], "_c64") == 0)3255 || (cchName > 5 && strcmp(&pszName[cchName - 5], "_lm64") == 0) ) )3256 {3257 char szCdeclName[512];3258 if (cchName > sizeof(szCdeclName) - 2)3259 cchName = sizeof(szCdeclName) - 2;3260 szCdeclName[0] = '_';3261 memcpy(&szCdeclName[1], pszName, cchName);3262 szCdeclName[cchName + 1] = '\0';3263 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, szCdeclName))3264 return false;3265 }3266 3267 3345 pThis->paSymbols[iSym].idx = idxPubDef++; 3268 3346 } … … 3280 3358 && pThis->paSymbols[iSym].enmType == OMFSYMTYPE_PUBDEF) 3281 3359 { 3282 const char *pszName = &p ElfStuff->pchStrTab[paSymbols[iSym].st_name];3283 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName ))3360 const char *pszName = &pMachOStuff->pchStrTab[paSymbols[iSym].st_name]; 3361 if (!omfWriter_PubDefAdd(pThis, paSymbols[iSym].st_value, pszName, false /*fPrependUnderscore*/)) 3284 3362 return false; 3285 3363 pThis->paSymbols[iSym].idx = idxPubDef++; … … 3298 3376 if (pThis->paSymbols[iSym].enmType == OMFSYMTYPE_EXTDEF) 3299 3377 { 3300 const char *pszName = &p ElfStuff->pchStrTab[paSymbols[iSym].st_name];3301 if (!omfWriter_ExtDefAdd(pThis, pszName ))3378 const char *pszName = &pMachOStuff->pchStrTab[paSymbols[iSym].st_name]; 3379 if (!omfWriter_ExtDefAdd(pThis, pszName, false /*fPrependUnderscore*/)) 3302 3380 return false; 3303 3381 pThis->paSymbols[iSym].idx = idxExtDef++; … … 3310 3388 } 3311 3389 3312 static bool convertElfSectionsToLeDataAndFixupps(POMFWRITER pThis, PCELFDETAILS pElfStuff, uint8_t const *pbFile, size_t cbFile) 3313 { 3314 Elf64_Sym const *paSymbols = pElfStuff->paSymbols; 3315 Elf64_Shdr const *paShdrs = pElfStuff->paShdrs; 3390 static bool convertMachOSectionsToLeDataAndFixupps(POMFWRITER pThis, PCMACHODETAILS pMachOStuff, 3391 uint8_t const *pbFile, size_t cbFile) 3392 { 3393 Elf64_Sym const *paSymbols = pMachOStuff->paSymbols; 3394 Elf64_Shdr const *paShdrs = pMachOStuff->paShdrs; 3316 3395 bool fRet = true; 3317 3396 for (uint32_t i = 1; i < pThis->cSegments; i++) … … 3320 3399 continue; 3321 3400 3322 const char *pszSegNm = &p ElfStuff->pchShStrTab[paShdrs[i].sh_name];3401 const char *pszSegNm = &pMachOStuff->pchShStrTab[paShdrs[i].sh_name]; 3323 3402 bool const fRelocs = i + 1 < pThis->cSegments && paShdrs[i + 1].sh_type == SHT_RELA; 3324 3403 uint32_t cRelocs = fRelocs ? 
paShdrs[i + 1].sh_size / sizeof(Elf64_Rela) : 0; … … 3384 3463 Elf64_Sym const * const pElfSym = &paSymbols[iSymbol]; 3385 3464 POMFSYMBOL const pOmfSym = &pThis->paSymbols[iSymbol]; 3386 const char * const pszSymName = &p ElfStuff->pchStrTab[pElfSym->st_name];3465 const char * const pszSymName = &pMachOStuff->pchStrTab[pElfSym->st_name]; 3387 3466 3388 3467 /* Calc fixup location in the pending chunk and setup a flexible pointer to it. */ … … 3524 3603 * Validate the source file a little. 3525 3604 */ 3526 ELFDETAILS ElfStuff;3527 if (!validate Elf(pszFile, pbFile, cbFile, &ElfStuff))3605 MACHODETAILS MachOStuff; 3606 if (!validateMachO(pszFile, pbFile, cbFile, &MachOStuff)) 3528 3607 return false; 3529 3608 … … 3531 3610 * Instantiate the OMF writer. 3532 3611 */ 3533 POMFWRITER pThis = omfWriter_Create(pszFile, ElfStuff.pEhdr->e_shnum, ElfStuff.cSymbols, pDst);3612 POMFWRITER pThis = omfWriter_Create(pszFile, MachOStuff.pEhdr->e_shnum, MachOStuff.cSymbols, pDst); 3534 3613 if (!pThis) 3535 3614 return false; … … 3544 3623 const char *pszStrTab = (const char *)&pbFile[paShdrs[pEhdr->e_shstrndx].sh_offset]; 3545 3624 3546 if ( convert ElfSectionsToSegDefsAndGrpDefs(pThis, &ElfStuff)3547 && convert ElfSymbolsToPubDefsAndExtDefs(pThis, &ElfStuff)3625 if ( convertMachOSectionsToSegDefsAndGrpDefs(pThis, &MachOStuff) 3626 && convertMachOSymbolsToPubDefsAndExtDefs(pThis, &MachOStuff) 3548 3627 && omfWriter_LinkPassSeparator(pThis) 3549 && convert ElfSectionsToLeDataAndFixupps(pThis, &ElfStuff, pbFile, cbFile)3628 && convertMachOSectionsToLeDataAndFixupps(pThis, &MachOStuff, pbFile, cbFile) 3550 3629 && omfWriter_EndModule(pThis) ) 3551 3630 { … … 4682 4761 } 4683 4762 4684 if (!omfWriter_ExtDefAddN(pThis, szName, cchName, idxType ))4763 if (!omfWriter_ExtDefAddN(pThis, szName, cchName, idxType, false /*fPrependUnderscore*/)) 4685 4764 return false; 4686 4765 } 4687 else if (!omfWriter_ExtDefAddN(pThis, pchName, cchName, idxType ))4766 else if (!omfWriter_ExtDefAddN(pThis, pchName, cchName, idxType, false /*fPrependUnderscore*/)) 4688 4767 return false; 4689 4768 } -
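The converter changes above replace the old _c64/_lm64 alias emission with a general rule: every PUBDEF/EXTDEF name gets a '_' prepended unless it already starts with one (or, for COFF input, with MSVC's '?' decoration), while Mach-O input is left alone because the OS X compiler already underscores its symbols. A compressed C sketch of that decision, mirroring the fPrependUnderscore arguments used in the omfWriter_PubDefAdd/omfWriter_ExtDefAdd calls in the diff; it is not the converter's actual code:

    #include <stdbool.h>

    /* Illustrative only. */
    static bool ExampleNeedsUnderscore(const char *pszName, bool fCoffInput)
    {
        if (pszName[0] == '_')
            return false;                   /* already cdecl-mangled          */
        if (fCoffInput && pszName[0] == '?')
            return false;                   /* MSVC C++ decorated symbol      */
        return true;                        /* plain C symbol: prepend '_'    */
    }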
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-bootsector.asm
--- r60365
+++ r60676
@@ -225,8 +225,8 @@
 .do_load
         mov     [g_bBootDrv], dl
-        call    bs3InitLoadImage
+        call    NAME(bs3InitLoadImage)
 %if 0
         mov     al, '='
-        call    bs3PrintChrInAl
+        call    NAME(bs3PrintChrInAl)
 %endif
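The boot sector now wraps its call targets in NAME(), consistent with the underscore-prefixing theme of the rest of the changeset. As an assumption (the macro's definition is not part of this diff), NAME() is the usual asmdefs-style wrapper that applies the platform's C symbol prefix; in C preprocessor terms it behaves roughly like the following illustrative definition:

    /* Assumed behaviour on targets whose C symbols carry a '_' prefix;
       purely illustrative, not the actual NASM macro. */
    #define NAME(a_Name)  _ ## a_Name

    /* NAME(bs3InitLoadImage) would then expand to _bs3InitLoadImage,
       matching the labels the object converter now emits. */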
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-MemAlloc.c
--- r60527
+++ r60676
@@ -37,5 +37,14 @@
 {
     void BS3_FAR *pvRet;
-    uint8_t idxSlabList = bs3MemSizeToSlabListIndex(cb);
+    uint8_t idxSlabList;
+
+#if ARCH_BITS == 16
+    /* Don't try allocate memory which address we cannot return. */
+    if (   enmKind != BS3MEMKIND_REAL
+        && BS3_MODE_IS_RM_OR_V86(g_bBs3CurrentMode))
+        enmKind = BS3MEMKIND_REAL;
+#endif
+
+    idxSlabList = bs3MemSizeToSlabListIndex(cb);
     if (idxSlabList < BS3_MEM_SLAB_LIST_COUNT)
     {
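The guard added above matters for 16-bit callers: in real or V86 mode a far pointer can only address the first megabyte, so handing out tiled/upper memory would return an address the caller cannot use. A hedged usage sketch, assuming the existing Bs3MemAlloc/Bs3MemFree prototypes from bs3kit.h; not code from the changeset:

    #include "bs3kit.h"

    static void ExampleAllocInV86(void)
    {
        /* In RM/V86 the allocator now quietly falls back to BS3MEMKIND_REAL. */
        void BS3_FAR *pvBuf = Bs3MemAlloc(BS3MEMKIND_TILED, 256);
        if (pvBuf)
            Bs3MemFree(pvBuf, 256);
    }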
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-MemCmp.asm
r60657 r60676 1 1 ; $Id$ 2 2 ;; @file 3 ; BS3Kit - Bs3Mem Set.3 ; BS3Kit - Bs3MemCmp. 4 4 ; 5 5 6 6 ; 7 ; Copyright (C) 2007-201 5Oracle Corporation7 ; Copyright (C) 2007-2016 Oracle Corporation 8 8 ; 9 9 ; This file is part of VirtualBox Open Source Edition (OSE), as … … 28 28 29 29 ;; 30 ; @cproto BS3_CMN_PROTO_NOSB( void, Bs3MemSet,(void BS3_FAR *pvDst, uint8_t bFiller, size_t cbDst));30 ; @cproto BS3_CMN_PROTO_NOSB(int, Bs3MemCmp,(void const BS3_FAR *pv1, void const BS3_FAR *pv2, size_t cb)); 31 31 ; 32 BS3_PROC_BEGIN_CMN Bs3MemSet, BS3_PBC_HYBRID 32 BS3_PROC_BEGIN_CMN Bs3MemCmp, BS3_PBC_HYBRID 33 TONLY16 CPU 8086 33 34 push xBP 34 35 mov xBP, xSP 35 36 push xDI 36 %ifdef RT_ARCH_AMD64 37 push xSI 38 TNOT64 push es 39 TONLY16 push ds 40 cld 37 41 38 mov rdi, rcx ; rdi = pvDst 39 mov rcx, r8 ; rcx = cbDst 40 movzx edx, dl ; bFiller 41 mov rax, 0101010101010101h 42 mul rdx 43 mov rcx, r8 44 shr rcx, 3 ; calc qword count. 45 cld 46 rep stosq 42 ; 43 ; To save complexity and space, do straight forward byte compares. 44 ; 45 %if TMPL_BITS == 16 46 mov di, [bp + 2 + cbCurRetAddr] ; pv1.off 47 mov es, [bp + 2 + cbCurRetAddr + 2] ; pv1.sel 48 mov si, [bp + 2 + cbCurRetAddr + 4] ; pv2.off 49 mov ds, [bp + 2 + cbCurRetAddr + 6] ; pv2.sel 50 mov cx, [bp + 2 + cbCurRetAddr + 8] ; cbDst 51 xor ax, ax 52 repe cmpsb 53 je .return 47 54 48 mov rcx, r8 ; cbDst 49 and rcx, 7 ; calc trailing byte count. 50 rep stosb 51 52 %elif ARCH_BITS == 16 53 push es 54 55 mov di, [bp + 2 + cbCurRetAddr] ; pvDst.off 56 mov es, [bp + 2 + cbCurRetAddr + 2] ; pvDst.sel 57 mov al, [bp + 2 + cbCurRetAddr + 4] ; bFiller 58 mov ah, al 59 mov cx, [bp + 2 + cbCurRetAddr + 6] ; cbDst 60 shr cx, 1 ; calc dword count. 61 rep stosw 62 63 mov cx, [bp + 2 + cbCurRetAddr + 6] ; cbDst 64 and cx, 1 ; calc tailing byte count. 65 rep stosb 66 67 pop es 68 69 %elif ARCH_BITS == 32 70 mov edi, [ebp + 8] ; pvDst 71 mov al, byte [ebp + 4 + cbCurRetAddr + 4] ; bFiller 72 mov ah, al 73 mov dx, ax 74 shl eax, 16 75 mov ax, dx ; eax = RT_MAKE_U32_FROM_U8(bFiller, bFiller, bFiller, bFiller) 76 mov ecx, [ebp + 4 + cbCurRetAddr + 8] ; cbDst 77 shr cx, 2 ; calc dword count. 78 rep stosd 79 80 mov ecx, [ebp + 4 + cbCurRetAddr + 8] ; cbDst 81 and ecx, 3 ; calc tailing byte count. 82 rep stosb 55 mov al, [es:di - 1] 56 xor dx, dx 57 mov dl, [esi - 1] 58 sub ax, dx 83 59 84 60 %else 85 %error "Unknown bitness." 61 %if TMPL_BITS == 64 62 mov rdi, rcx ; rdi = pv1 63 mov rsi, rdx ; rdi = pv2 64 mov rcx, r8 ; rcx = cbDst 65 %else 66 mov ax, ds 67 mov es, ax ; paranoia 68 mov edi, [ebp + 4 + cbCurRetAddr] ; pv1 69 mov esi, [ebp + 4 + cbCurRetAddr + 4] ; pv2 70 mov ecx, [ebp + 4 + cbCurRetAddr + 8] ; cbDst 71 %endif 72 xor eax, eax 73 repe cmpsb 74 je .return 75 76 mov al, [xDI - 1] 77 movzx edx, byte [xSI - 1] 78 sub eax, edx 86 79 %endif 87 80 81 .return: 82 TONLY16 pop ds 83 TNOT64 pop es 84 pop xSI 88 85 pop xDI 89 86 pop xBP 90 87 BS3_HYBRID_RET 91 BS3_PROC_END_CMN Bs3Mem Set88 BS3_PROC_END_CMN Bs3MemCmp 92 89 -
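The file above was copied from bs3-cmn-MemSet.asm and rewritten into a byte-wise compare; the @cproto comment gives the contract. A C-equivalent reference of those semantics, for illustration only (the shipped routine is the hand-written assembly above):

    #include "bs3kit.h"

    /* Hypothetical reference implementation of the Bs3MemCmp contract:
       returns 0 on equality, otherwise the difference of the first
       mismatching bytes (first operand minus second). */
    static int ExampleMemCmpRef(void const BS3_FAR *pv1, void const BS3_FAR *pv2, size_t cb)
    {
        uint8_t const BS3_FAR *pb1 = (uint8_t const BS3_FAR *)pv1;
        uint8_t const BS3_FAR *pb2 = (uint8_t const BS3_FAR *)pv2;
        while (cb-- > 0)
        {
            int iDiff = *pb1++ - *pb2++;
            if (iDiff != 0)
                return iDiff;
        }
        return 0;
    }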
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-PagingProtect.c
r60527 r60676 37 37 * Defined Constants And Macros * 38 38 *********************************************************************************************************************************/ 39 #if 1 40 # define BS3PAGING_DPRINTF1(a) Bs3TestPrintf a 41 #else 42 # define BS3PAGING_DPRINTF1(a) do { } while (0) 43 #endif 39 44 #if 0 40 # define BS3PAGING_DPRINTF (a) Bs3TestPrintf a41 #else 42 # define BS3PAGING_DPRINTF (a) do { } while (0)45 # define BS3PAGING_DPRINTF2(a) Bs3TestPrintf a 46 #else 47 # define BS3PAGING_DPRINTF2(a) do { } while (0) 43 48 #endif 44 49 … … 68 73 uint32_t const uMaxAddr = UINT32_MAX; 69 74 #endif 70 BS3PAGING_DPRINTF (("bs3PagingGetLegacyPte: cr3=%RX32 uFlat=%RX32 uMaxAddr=%RX32\n", (uint32_t)cr3, uFlat, uMaxAddr));75 BS3PAGING_DPRINTF2(("bs3PagingGetLegacyPte: cr3=%RX32 uFlat=%RX32 uMaxAddr=%RX32\n", (uint32_t)cr3, uFlat, uMaxAddr)); 71 76 72 77 *prc = VERR_OUT_OF_RANGE; … … 82 87 83 88 BS3_ASSERT(pPD->a[iPde].b.u1Present); 84 BS3PAGING_DPRINTF (("bs3PagingGetLegacyPte: pPD=%p iPde=%#x: %#RX32\n", pPD, iPde, pPD->a[iPde]));89 BS3PAGING_DPRINTF2(("bs3PagingGetLegacyPte: pPD=%p iPde=%#x: %#RX32\n", pPD, iPde, pPD->a[iPde])); 85 90 if (pPD->a[iPde].b.u1Present) 86 91 { … … 88 93 { 89 94 if (pPD->a[iPde].u <= uMaxAddr) 95 { 90 96 pPTE = &((X86PT BS3_FAR *)Bs3XptrFlatToCurrent(pPD->a[iPde].u & ~(uint32_t)PAGE_OFFSET_MASK))->a[iPte]; 97 *prc = VINF_SUCCESS; 98 } 99 else 100 BS3PAGING_DPRINTF1(("bs3PagingGetLegacyPte: out of range! iPde=%#x: %#x\n", iPde, pPD->a[iPde].u)); 91 101 } 92 102 else … … 99 109 uPte |= X86_PTE_PAT; 100 110 101 pPT = (X86PT BS3_FAR *)bs3PagingBuildPaeTable(RT_MAKE_U64(uPte, uPte +PAGE_SIZE),111 pPT = (X86PT BS3_FAR *)bs3PagingBuildPaeTable(RT_MAKE_U64(uPte, uPte | PAGE_SIZE), 102 112 RT_MAKE_U64(PAGE_SIZE*2, PAGE_SIZE*2), 103 113 uMaxAddr > _1M ? BS3MEMKIND_TILED : BS3MEMKIND_REAL, prc); 104 114 105 BS3PAGING_DPRINTF (("bs3PagingGetLegacyPte: Built pPT=%p uPte=%RX32\n", pPT, uPte));115 BS3PAGING_DPRINTF2(("bs3PagingGetLegacyPte: Built pPT=%p uPte=%RX32\n", pPT, uPte)); 106 116 if (pPT) 107 117 { 108 118 pPD->a[iPde].u = Bs3SelPtrToFlat(pPT) 109 119 | (pPD->a[iPde].u & ~(uint32_t)(X86_PTE_PG_MASK | X86_PDE4M_PS | X86_PDE4M_G | X86_PDE4M_D)); 110 BS3PAGING_DPRINTF (("bs3PagingGetLegacyPte: iPde=%#x: %#RX32\n", iPde, pPD->a[iPde].u));120 BS3PAGING_DPRINTF2(("bs3PagingGetLegacyPte: iPde=%#x: %#RX32\n", iPde, pPD->a[iPde].u)); 111 121 if (fUseInvlPg) 112 122 ASMInvalidatePage(uFlat); 113 123 pPTE = &pPT->a[iPte]; 124 *prc = VINF_SUCCESS; 114 125 } 115 126 } … … 117 128 } 118 129 } 130 else 131 BS3PAGING_DPRINTF1(("bs3PagingGetLegacyPte: out of range! cr3=%#x\n", cr3)); 119 132 return pPTE; 120 133 } … … 149 162 if ((pPdpt->a[iPdpte].u & X86_PDPE_PG_MASK) <= uMaxAddr) 150 163 pPD = (X86PDPAE BS3_FAR *)Bs3XptrFlatToCurrent(pPdpt->a[iPdpte].u & ~(uint64_t)PAGE_OFFSET_MASK); 164 else 165 BS3PAGING_DPRINTF1(("bs3PagingGetPte: out of range! iPdpte=%#x: %RX64 max=%RX32\n", 166 iPdpte, pPdpt->a[iPdpte].u, (uint32_t)uMaxAddr)); 151 167 } 152 168 else … … 174 190 if ((pPdpt->a[iPdpte].u & X86_PDPE_PG_MASK) <= uMaxAddr) 175 191 pPD = (X86PDPAE BS3_FAR *)Bs3XptrFlatToCurrent(pPdpt->a[iPdpte].u & X86_PDPE_PG_MASK); 192 else 193 BS3PAGING_DPRINTF1(("bs3PagingGetPte: out of range! iPdpte=%#x: %RX64 max=%RX32\n", 194 iPdpte, pPdpt->a[iPdpte].u, (uint32_t)uMaxAddr)); 176 195 } 177 196 else 197 { 178 198 pPD = NULL; 199 BS3PAGING_DPRINTF1(("bs3PagingGetPte: out of range! 
uFlat=%#RX64 max=%RX32\n", uFlat, (uint32_t)uMaxAddr)); 200 } 179 201 if (pPD) 180 202 { … … 184 206 { 185 207 if ((pPD->a[iPde].u & X86_PDE_PAE_PG_MASK) <= uMaxAddr) 208 { 186 209 pPTE = &((X86PTPAE BS3_FAR *)Bs3XptrFlatToCurrent(pPD->a[iPde].u & ~(uint64_t)PAGE_OFFSET_MASK))->a[iPte]; 210 *prc = VINF_SUCCESS; 211 } 212 else 213 BS3PAGING_DPRINTF1(("bs3PagingGetPte: out of range! iPde=%#x: %RX64 max=%RX32\n", 214 iPde, pPD->a[iPde].u, (uint32_t)uMaxAddr)); 187 215 } 188 216 else … … 205 233 ASMInvalidatePage(uFlat); 206 234 pPTE = &pPT->a[iPte]; 235 *prc = VINF_SUCCESS; 207 236 } 208 237 } 209 238 } 210 239 } 211 240 else 241 BS3PAGING_DPRINTF1(("bs3PagingGetPte: out of range! cr3=%#RX32 uMaxAddr=%#RX32\n", (uint32_t)cr3, (uint32_t)uMaxAddr)); 212 242 return pPTE; 213 243 } … … 217 247 BS3_CMN_DEF(int, Bs3PagingProtect,(uint64_t uFlat, uint64_t cb, uint64_t fSet, uint64_t fClear)) 218 248 { 219 RTCCUINTXREG const cr3 = ASMGetCR3(); 220 RTCCUINTXREG const cr4 = g_uBs3CpuDetected & BS3CPU_F_CPUID ? ASMGetCR4() : 0; 221 bool const fLegacyPTs = !(cr4 & X86_CR4_PAE); 222 bool const fUseInvlPg = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486 223 && ( cb < UINT64_C(16)*PAGE_SIZE 224 || (cr4 & X86_CR4_PGE)); 225 unsigned cEntries; 226 int rc; 227 249 #if ARCH_BITS == 16 250 if (!BS3_MODE_IS_V86(g_bBs3CurrentMode)) 251 #endif 252 { 253 RTCCUINTXREG const cr3 = ASMGetCR3(); 254 RTCCUINTXREG const cr4 = g_uBs3CpuDetected & BS3CPU_F_CPUID ? ASMGetCR4() : 0; 255 bool const fLegacyPTs = !(cr4 & X86_CR4_PAE); 256 bool const fUseInvlPg = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486 257 && ( cb < UINT64_C(16)*PAGE_SIZE 258 || (cr4 & X86_CR4_PGE)); 259 unsigned cEntries; 260 int rc; 261 262 /* 263 * Adjust the range parameters. 264 */ 265 cb += uFlat & PAGE_OFFSET_MASK; 266 cb = RT_ALIGN_64(cb, PAGE_SIZE); 267 uFlat &= ~(uint64_t)PAGE_OFFSET_MASK; 268 269 fSet &= ~X86_PTE_PAE_PG_MASK; 270 fClear &= ~X86_PTE_PAE_PG_MASK; 271 272 BS3PAGING_DPRINTF1(("Bs3PagingProtect: uFlat=%RX64 cb=%RX64 fSet=%RX64 fClear=%RX64 %s %s\n", uFlat, cb, fSet, fClear, 273 fLegacyPTs ? "legacy" : "pae/amd64", fUseInvlPg ? "invlpg" : "reload-cr3")); 274 if (fLegacyPTs) 275 { 276 /* 277 * Legacy page tables. 278 */ 279 while ((uint32_t)cb > 0) 280 { 281 PX86PTE pPte = BS3_CMN_FAR_NM(bs3PagingGetLegacyPte)(cr3, (uint32_t)uFlat, fUseInvlPg, &rc); 282 if (!pPte) 283 return rc; 284 285 cEntries = X86_PG_ENTRIES - ((uFlat >> X86_PT_SHIFT) & X86_PT_MASK); 286 while (cEntries-- > 0 && cb > 0) 287 { 288 pPte->u &= ~(uint32_t)fClear; 289 pPte->u |= (uint32_t)fSet; 290 if (fUseInvlPg) 291 ASMInvalidatePage(uFlat); 292 293 pPte++; 294 uFlat += PAGE_SIZE; 295 cb -= PAGE_SIZE; 296 } 297 } 298 } 299 else 300 { 301 /* 302 * Long mode or PAE page tables (at this level they are the same). 303 */ 304 while (cb > 0) 305 { 306 PX86PTEPAE pPte = BS3_CMN_FAR_NM(bs3PagingGetPte)(cr3, uFlat, fUseInvlPg, &rc); 307 if (!pPte) 308 return rc; 309 310 cEntries = X86_PG_ENTRIES - ((uFlat >> X86_PT_PAE_SHIFT) & X86_PT_PAE_MASK); 311 while (cEntries-- > 0 && cb > 0) 312 { 313 pPte->u &= ~fClear; 314 pPte->u |= fSet; 315 if (fUseInvlPg) 316 ASMInvalidatePage(uFlat); 317 318 pPte++; 319 uFlat += PAGE_SIZE; 320 cb -= PAGE_SIZE; 321 } 322 } 323 } 324 325 /* 326 * Flush the TLB if we didn't use INVLPG above. 
327 */ 328 BS3PAGING_DPRINTF2(("Bs3PagingProtect: reloading cr3=%RX32\n", (uint32_t)cr3)); 329 //if (!fUseInvlPg) 330 ASMSetCR3(cr3); 331 BS3PAGING_DPRINTF2(("Bs3PagingProtect: reloaded cr3=%RX32\n", (uint32_t)cr3)); 332 } 333 #if ARCH_BITS == 16 228 334 /* 229 * Adjust the range parameters.335 * We can do this stuff in v8086 mode. 230 336 */ 231 cb += uFlat & PAGE_OFFSET_MASK;232 cb = RT_ALIGN_64(cb, PAGE_SIZE);233 uFlat &= ~(uint64_t)PAGE_OFFSET_MASK;234 235 fSet &= ~X86_PTE_PAE_PG_MASK;236 fClear &= ~X86_PTE_PAE_PG_MASK;237 238 BS3PAGING_DPRINTF(("Bs3PagingProtect: uFlat=%RX64 cb=%RX64 fSet=%RX64 fClear=%RX64 %s %s\n", uFlat, cb, fSet, fClear,239 fLegacyPTs ? "legacy" : "pae/amd64", fUseInvlPg ? "invlpg" : "reload-cr3"));240 if (fLegacyPTs)241 {242 /*243 * Legacy page tables.244 */245 while ((uint32_t)cb > 0)246 {247 PX86PTE pPte = BS3_CMN_NM(bs3PagingGetLegacyPte)(cr3, (uint32_t)uFlat, fUseInvlPg, &rc);248 if (!pPte)249 return rc;250 251 cEntries = X86_PG_ENTRIES - ((uFlat >> X86_PT_SHIFT) & X86_PT_MASK);252 while (cEntries-- > 0 && cb > 0)253 {254 pPte->u &= ~(uint32_t)fClear;255 pPte->u |= (uint32_t)fSet;256 if (fUseInvlPg)257 ASMInvalidatePage(uFlat);258 259 pPte++;260 uFlat += PAGE_SIZE;261 cb -= PAGE_SIZE;262 }263 }264 }265 337 else 266 { 267 /* 268 * Long mode or PAE page tables (at this level they are the same). 269 */ 270 while (cb > 0) 271 { 272 PX86PTEPAE pPte = BS3_CMN_NM(bs3PagingGetPte)(cr3, uFlat, fUseInvlPg, &rc); 273 if (!pPte) 274 return rc; 275 276 cEntries = X86_PG_ENTRIES - ((uFlat >> X86_PT_PAE_SHIFT) & X86_PT_PAE_MASK); 277 while (cEntries-- > 0 && cb > 0) 278 { 279 pPte->u &= ~fClear; 280 pPte->u |= fSet; 281 if (fUseInvlPg) 282 ASMInvalidatePage(uFlat); 283 284 pPte++; 285 uFlat += PAGE_SIZE; 286 cb -= PAGE_SIZE; 287 } 288 } 289 } 290 291 /* 292 * Flush the TLB if we didn't use INVLPG above. 293 */ 294 BS3PAGING_DPRINTF(("Bs3PagingProtect: reloading cr3=%RX32\n", (uint32_t)cr3)); 295 //if (!fUseInvlPg) 296 ASMSetCR3(cr3); 297 BS3PAGING_DPRINTF(("Bs3PagingProtect: reloaded cr3=%RX32\n", (uint32_t)cr3)); 298 338 return Bs3SwitchFromV86To16BitAndCallC((FPFNBS3FAR)Bs3PagingProtect_f16, sizeof(uint64_t)*4, uFlat, cb, fSet, fClear); 339 #endif 299 340 return VINF_SUCCESS; 300 341 } -
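Editor's note: Bs3PagingProtect above applies "pte &= ~fClear; pte |= fSet" to every page-table entry in the range and, with this revision, also works when called from v8086 mode by first switching to 16-bit protected mode. A minimal usage sketch, assuming a page-aligned and already-mapped flat address; the wrapper function and its name are made up for illustration:

    #include "bs3kit.h"

    /* Hypothetical wrapper: hide one page (clear the present bit), let the
       caller provoke a #PF, then make the page present again. */
    static int bs3TestHidePageTemporarily(uint32_t uFlatPage)
    {
        int rc = Bs3PagingProtect(uFlatPage, PAGE_SIZE, 0 /*fSet*/, X86_PTE_P /*fClear*/);
        if (rc == VINF_SUCCESS)
        {
            /* ... run the code that is expected to fault here ... */
            rc = Bs3PagingProtect(uFlatPage, PAGE_SIZE, X86_PTE_P /*fSet*/, 0 /*fClear*/);
        }
        return rc;
    }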
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-SelRealModeDataToFlat.asm
r60657 r60676 43 43 ;; 44 44 ; @cproto BS3_CMN_PROTO_NOSB(uint32_t, Bs3SelRealModeDataToFlat,(uint32_t uFar1616)); 45 ; @cproto BS3_CMN_PROTO_NOSB(uint32_t, Bs3SelRealModeCodeToFlat,(uint32_t uFar1616)); 45 46 ; 46 47 ; @uses Only return registers (ax:dx, eax, eax); 47 48 ; @remarks No 20h scratch area requirements. 48 49 ; 50 BS3_PROC_BEGIN_CMN Bs3SelRealModeCodeToFlat, BS3_PBC_NEAR ; Far stub generated by the makefile/bs3kit.h. 49 51 BS3_PROC_BEGIN_CMN Bs3SelRealModeDataToFlat, BS3_PBC_NEAR ; Far stub generated by the makefile/bs3kit.h. 50 52 push xBP -
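Editor's note: the new Bs3SelRealModeCodeToFlat label shares the body of Bs3SelRealModeDataToFlat; both perform the usual real-mode 16:16 to flat conversion. Sketched here in C for reference only (the function name is illustrative, not the assembly's internals):

    #include <stdint.h>

    /* Illustrative C equivalent of the real-mode far-to-flat conversion:
       the high word of uFar1616 is the segment, the low word the offset. */
    static uint32_t RealMode1616ToFlat(uint32_t uFar1616)
    {
        uint16_t const uSeg = (uint16_t)(uFar1616 >> 16);
        uint16_t const off  = (uint16_t)uFar1616;
        return ((uint32_t)uSeg << 4) + off;
    }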
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-mode-EnteredMode.asm
r60557 r60676 221 221 %elif BS3_MODE_IS_64BIT_SYS(TMPL_MODE) 222 222 BS3_EXTERN_CMN Bs3Trap64SetGate 223 extern Bs3TrapSystemCallHandler_lm64223 extern _Bs3TrapSystemCallHandler_lm64 224 224 TMPL_BEGIN_TEXT 225 225 push 0 ; bIst 226 226 %if BS3_MODE_IS_64BIT_CODE(TMPL_MODE) 227 push Bs3TrapSystemCallHandler_lm64 wrt FLAT227 push _Bs3TrapSystemCallHandler_lm64 wrt FLAT 228 228 %else 229 229 push dword 0 ; upper offset 230 push dword Bs3TrapSystemCallHandler_lm64 wrt FLAT230 push dword _Bs3TrapSystemCallHandler_lm64 wrt FLAT 231 231 %endif 232 232 push BS3_SEL_R0_CS64 -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-mode-TestDoModesHlp.asm
r60557 r60676 134 134 extern RT_CONCAT3(_Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_pae32):wrt BS3FLAT 135 135 extern RT_CONCAT3(_Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_lm32):wrt BS3FLAT 136 extern RT_CONCAT3( Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_lm64):wrt BS3FLAT136 extern RT_CONCAT3(_Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_lm64):wrt BS3FLAT 137 137 138 138 … … 1105 1105 1106 1106 STRICT_SAVE_REGS 1107 call RT_CONCAT3( Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_lm64)1107 call RT_CONCAT3(_Bs3SwitchTo,TMPL_MODE_UNAME,_Safe_lm64) 1108 1108 BS3_SET_BITS TMPL_BITS 1109 1109 STRICT_CHECK_REGS … … 1133 1133 1134 1134 %endif 1135 -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-I8DQ.asm
r60595 r60676 33 33 ; 64-bit unsigned integer division, SS variant. 34 34 ; 35 ; @returns ax:bx:cx:dx quotient. 35 ; @returns ax:bx:cx:dx quotient. (AX is the most significant, DX the least) 36 36 ; @param ax:bx:cx:dx Dividend. 37 37 ; @param [ss:si] Divisor … … 58 58 ; 64-bit unsigned integer division, ES variant. 59 59 ; 60 ; @returns ax:bx:cx:dx quotient. 60 ; @returns ax:bx:cx:dx quotient. (AX is the most significant, DX the least) 61 61 ; @param ax:bx:cx:dx Dividend. 62 62 ; @param [es:si] Divisor … … 88 88 89 89 ; The dividend. 90 push ax 91 push bx 92 push cx 90 93 push dx 91 push cx92 push bx93 push ax94 94 95 95 call Bs3Int64Div -
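Editor's note: the Watcom 16-bit helpers keep 64-bit values in AX:BX:CX:DX with AX as the most significant word, as the updated comments spell out. The reordered pushes (the same fix recurs in the I8DR, U8DQ and U8DR files below) build the dividend on the stack in little-endian order: pushing AX first leaves it at the highest address and DX, the least significant word, at the lowest, which matches the in-memory layout of the RTUINT64U dividend parameter taken by the Bs3Int64Div/Bs3UInt64Div workers. A sketch of the word layout in C; the packing function is illustrative, not part of the kit:

    #include <stdint.h>

    /* Illustrative only: how the AX:BX:CX:DX quadword maps onto a 64-bit value,
       with AX as the most significant word and DX as the least. */
    static uint64_t PackAxBxCxDx(uint16_t ax, uint16_t bx, uint16_t cx, uint16_t dx)
    {
        return ((uint64_t)ax << 48)
             | ((uint64_t)bx << 32)
             | ((uint64_t)cx << 16)
             |  (uint64_t)dx;
    }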
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-I8DR.asm
r60595 r60676 33 33 ; 64-bit unsigned integer modulo, SS variant. 34 34 ; 35 ; @returns ax:bx:cx:dx reminder. 35 ; @returns ax:bx:cx:dx reminder. (AX is the most significant, DX the least) 36 36 ; @param ax:bx:cx:dx Dividend. 37 37 ; @param [ss:si] Divisor … … 88 88 89 89 ; The dividend. 90 push ax 91 push bx 92 push cx 90 93 push dx 91 push cx92 push bx93 push ax94 94 95 95 call Bs3Int64Div -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-I8RS.asm
r60595 r60676 31 31 ; 64-bit signed integer left shift. 32 32 ; 33 ; @returns AX:BX:CX:DX 33 ; @returns AX:BX:CX:DX (AX is the most significant, DX the least) 34 34 ; @param AX:BX:CX:DX Value to shift. 35 35 ; @param SI Shift count. -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-U8DQ.asm
r60595 r60676 33 33 ; 64-bit unsigned integer division, SS variant. 34 34 ; 35 ; @returns ax:bx:cx:dx quotient. 35 ; @returns ax:bx:cx:dx quotient. (AX is the most significant, DX the least) 36 36 ; @param ax:bx:cx:dx Dividend. 37 37 ; @param [ss:si] Divisor … … 58 58 ; 64-bit unsigned integer division, ES variant. 59 59 ; 60 ; @returns ax:bx:cx:dx quotient. 60 ; @returns ax:bx:cx:dx quotient. (AX is the most significant, DX the least) 61 61 ; @param ax:bx:cx:dx Dividend. 62 62 ; @param [es:si] Divisor … … 90 90 91 91 ; The dividend. 92 push ax 93 push bx 94 push cx 92 95 push dx 93 push cx94 push bx95 push ax96 96 97 97 call Bs3UInt64Div -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-U8DR.asm
r60595 r60676 33 33 ; 64-bit unsigned integer modulo, SS variant. 34 34 ; 35 ; @returns ax:bx:cx:dx reminder. 35 ; @returns ax:bx:cx:dx reminder. (AX is the most significant, DX the least) 36 36 ; @param ax:bx:cx:dx Dividend. 37 37 ; @param [ss:si] Divisor … … 58 58 ; 64-bit unsigned integer modulo, ES variant. 59 59 ; 60 ; @returns ax:bx:cx:dx reminder. 60 ; @returns ax:bx:cx:dx reminder. (AX is the most significant, DX the least) 61 61 ; @param ax:bx:cx:dx Dividend. 62 62 ; @param [es:si] Divisor … … 90 90 91 91 ; The dividend. 92 push ax 93 push bx 94 push cx 92 95 push dx 93 push cx94 push bx95 push ax96 96 97 97 call Bs3UInt64Div -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-U8LS.asm
r60595 r60676 31 31 ; 64-bit integer left shift. 32 32 ; 33 ; @returns AX:BX:CX:DX 33 ; @returns AX:BX:CX:DX (AX is the most significant, DX the least) 34 34 ; @param AX:BX:CX:DX Value to shift. 35 35 ; @param SI Shift count. … … 47 47 ; time in the below loop. 48 48 ; 49 ; Using 8086 com atible approach here as it's less hazzle to write49 ; Using 8086 compatible approach here as it's less hazzle to write 50 50 ; and smaller. 51 51 ; 52 52 and si, 3fh 53 53 54 jz .return 54 55 55 .next_shift: 56 shl ax, 1 56 shl dx, 1 57 rcl cx, 1 57 58 rcl bx, 1 58 rcl cx, 1 59 rcl dx, 1 59 rcl ax, 1 60 60 dec si 61 61 jnz .next_shift -
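Editor's note: besides reversing the shl/rcl chain so that DX (the least significant word) is shifted first and AX receives the final carry, the change adds a "jz .return" so a shift count of zero no longer drops into the dec/jnz loop and spins through 65536 iterations. A C model of the per-bit loop, with illustrative names:

    #include <stdint.h>

    /* Illustrative C model of the loop above: shift the least significant word
       (dx) left and ripple the carry up through cx and bx into ax. */
    static void ShiftLeft64(uint16_t *pax, uint16_t *pbx, uint16_t *pcx, uint16_t *pdx,
                            unsigned cShift)
    {
        cShift &= 0x3f;                          /* mirrors 'and si, 3fh' */
        while (cShift-- > 0)                     /* zero count does nothing, like 'jz .return' */
        {
            unsigned const fCarryDx = *pdx >> 15;
            unsigned const fCarryCx = *pcx >> 15;
            unsigned const fCarryBx = *pbx >> 15;
            *pdx = (uint16_t)(*pdx << 1);
            *pcx = (uint16_t)((*pcx << 1) | fCarryDx);
            *pbx = (uint16_t)((*pbx << 1) | fCarryCx);
            *pax = (uint16_t)((*pax << 1) | fCarryBx);
        }
    }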
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-wc16-U8RS.asm
r60595 r60676 31 31 ; 64-bit unsigned integer left shift. 32 32 ; 33 ; @returns AX:BX:CX:DX 33 ; @returns AX:BX:CX:DX (AX is the most significant, DX the least) 34 34 ; @param AX:BX:CX:DX Value to shift. 35 35 ; @param SI Shift count. -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-autostubs.kmk
r60657 r60676 29 29 $(call BS3KIT_FN_GEN_CMN_FARSTUB,bs3kit-common-16,Bs3SelProtFar16DataToFlat,4) 30 30 $(call BS3KIT_FN_GEN_CMN_FARSTUB,bs3kit-common-16,Bs3SelProtFar16DataToRealMode,4) 31 $(call BS3KIT_FN_GEN_CMN_FARSTUB,bs3kit-common-16,Bs3SelRealModeCodeToFlat,4) 31 32 $(call BS3KIT_FN_GEN_CMN_FARSTUB,bs3kit-common-16,Bs3SelRealModeDataToFlat,4) 32 33 $(call BS3KIT_FN_GEN_CMN_FARSTUB,bs3kit-common-16,Bs3SelRealModeDataToProtFar16,4) … … 40 41 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3PagingProtect) 41 42 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3PagingProtectPtr) 43 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3SwitchFromV86To16BitAndCallC) 42 44 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3TrapSetHandler) 43 45 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3Printf) … … 55 57 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemAllocZ) 56 58 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemCpy) 59 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemGuardedTestPageAlloc) 60 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemGuardedTestPageAllocEx) 57 61 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemMove) 58 62 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemPCpy) … … 62 66 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3SlabListAllocEx) 63 67 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemFree) 68 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3MemGuardedTestPageFree) 64 69 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3PicMaskAll) 65 70 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3PrintStr) 66 71 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxConvertToRingX) 67 72 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxPrint) 73 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxSetGrpDsFromCurPtr) 74 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxSetGrpSegFromCurPtr) 75 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxSetGrpSegFromFlat) 76 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3RegCtxSetRipCsFromLnkPtr) 68 77 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3SlabInit) 69 78 $(call BS3KIT_FN_GEN_CMN_NEARSTUB,bs3kit-common-16,Bs3SlabListAdd) -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-mangling-code-define.h
r60657 r60676 37 37 #define Bs3MemAllocZ BS3_CMN_MANGLER(Bs3MemAllocZ) 38 38 #define Bs3MemChr BS3_CMN_MANGLER(Bs3MemChr) 39 #define Bs3MemCmp BS3_CMN_MANGLER(Bs3MemCmp) 39 40 #define Bs3MemCpy BS3_CMN_MANGLER(Bs3MemCpy) 40 41 #define Bs3MemFree BS3_CMN_MANGLER(Bs3MemFree) 42 #define Bs3MemGuardedTestPageAlloc BS3_CMN_MANGLER(Bs3MemGuardedTestPageAlloc) 43 #define Bs3MemGuardedTestPageAllocEx BS3_CMN_MANGLER(Bs3MemGuardedTestPageAllocEx) 44 #define Bs3MemGuardedTestPageFree BS3_CMN_MANGLER(Bs3MemGuardedTestPageFree) 41 45 #define Bs3MemMove BS3_CMN_MANGLER(Bs3MemMove) 42 46 #define Bs3MemPCpy BS3_CMN_MANGLER(Bs3MemPCpy) … … 62 66 #define Bs3RegCtxSave BS3_CMN_MANGLER(Bs3RegCtxSave) 63 67 #define Bs3RegCtxSaveEx BS3_CMN_MANGLER(Bs3RegCtxSaveEx) 68 #define Bs3RegCtxSetGrpDsFromCurPtr BS3_CMN_MANGLER(Bs3RegCtxSetGrpDsFromCurPtr) 69 #define Bs3RegCtxSetGrpSegFromCurPtr BS3_CMN_MANGLER(Bs3RegCtxSetGrpSegFromCurPtr) 70 #define Bs3RegCtxSetGrpSegFromFlat BS3_CMN_MANGLER(Bs3RegCtxSetGrpSegFromFlat) 71 #define Bs3RegCtxSetRipCsFromLnkPtr BS3_CMN_MANGLER(Bs3RegCtxSetRipCsFromLnkPtr) 64 72 #define Bs3SelFar32ToFlat32 BS3_CMN_MANGLER(Bs3SelFar32ToFlat32) 65 73 #define Bs3SelFar32ToFlat32NoClobber BS3_CMN_MANGLER(Bs3SelFar32ToFlat32NoClobber) … … 72 80 #define Bs3SelProtFar32ToFlat32 BS3_CMN_MANGLER(Bs3SelProtFar32ToFlat32) 73 81 #define Bs3SelProtModeCodeToRealMode BS3_CMN_MANGLER(Bs3SelProtModeCodeToRealMode) 82 #define Bs3SelRealModeCodeToFlat BS3_CMN_MANGLER(Bs3SelRealModeCodeToFlat) 74 83 #define Bs3SelRealModeCodeToProtMode BS3_CMN_MANGLER(Bs3SelRealModeCodeToProtMode) 75 84 #define Bs3SelRealModeDataToFlat BS3_CMN_MANGLER(Bs3SelRealModeDataToFlat) … … 91 100 #define Bs3StrPrintf BS3_CMN_MANGLER(Bs3StrPrintf) 92 101 #define Bs3StrPrintfV BS3_CMN_MANGLER(Bs3StrPrintfV) 102 #define Bs3SwitchFromV86To16BitAndCallC BS3_CMN_MANGLER(Bs3SwitchFromV86To16BitAndCallC) 93 103 #define Bs3TestCheckRegCtxEx BS3_CMN_MANGLER(Bs3TestCheckRegCtxEx) 94 104 #define Bs3TestFailed BS3_CMN_MANGLER(Bs3TestFailed) -
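Editor's note: these defines route every unmangled Bs3Xxx call to the per-bit-count symbol so one set of headers serves all three compilation modes. The exact BS3_CMN_MANGLER definition is not part of this hunk, so the expansion below is an assumption based on the _c16/_c32/_c64 suffixes used elsewhere in the changeset:

    /* Assumed expansion, for illustration only: */
    #if ARCH_BITS == 16
    # define BS3_CMN_MANGLER(a_Name) a_Name##_c16
    #elif ARCH_BITS == 32
    # define BS3_CMN_MANGLER(a_Name) a_Name##_c32
    #else
    # define BS3_CMN_MANGLER(a_Name) a_Name##_c64
    #endif

    /* After including the define header, a call such as Bs3MemCmp(pv1, pv2, cb)
       would therefore resolve to Bs3MemCmp_c16, _c32 or _c64 as appropriate. */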
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-mangling-code-undef.h
r60657 r60676 37 37 #undef Bs3MemAllocZ 38 38 #undef Bs3MemChr 39 #undef Bs3MemCmp 39 40 #undef Bs3MemCpy 40 41 #undef Bs3MemFree 42 #undef Bs3MemGuardedTestPageAlloc 43 #undef Bs3MemGuardedTestPageAllocEx 44 #undef Bs3MemGuardedTestPageFree 41 45 #undef Bs3MemMove 42 46 #undef Bs3MemPCpy … … 62 66 #undef Bs3RegCtxSave 63 67 #undef Bs3RegCtxSaveEx 68 #undef Bs3RegCtxSetGrpDsFromCurPtr 69 #undef Bs3RegCtxSetGrpSegFromCurPtr 70 #undef Bs3RegCtxSetGrpSegFromFlat 71 #undef Bs3RegCtxSetRipCsFromLnkPtr 64 72 #undef Bs3SelFar32ToFlat32 65 73 #undef Bs3SelFar32ToFlat32NoClobber … … 72 80 #undef Bs3SelProtFar32ToFlat32 73 81 #undef Bs3SelProtModeCodeToRealMode 82 #undef Bs3SelRealModeCodeToFlat 74 83 #undef Bs3SelRealModeCodeToProtMode 75 84 #undef Bs3SelRealModeDataToFlat … … 91 100 #undef Bs3StrPrintf 92 101 #undef Bs3StrPrintfV 102 #undef Bs3SwitchFromV86To16BitAndCallC 93 103 #undef Bs3TestCheckRegCtxEx 94 104 #undef Bs3TestFailed -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-mangling-data.h
r60557 r60676 32 32 #define ___bs3kit_mangling_data_h 33 33 34 #if 0 /* the object converter deals with this now */ 34 35 #if ARCH_BITS == 64 35 36 … … 275 276 276 277 #endif /* ARCH_BITS == 64 */ 278 #endif /* not needed */ 279 277 280 #endif /* !___bs3kit_mangling_data_h */ 278 281 -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-template-footer.mac
r60657 r60676 127 127 %unmacro TONLY32 1+ 128 128 %unmacro TONLY64 1+ 129 %unmacro TNOT64 1+ 129 130 -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit-template-header.mac
r60657 r60676 137 137 %define TMPL_BITS 64 138 138 %define TMPL_PTR_DEF dq 139 %define TMPL_UNDERSCORE 140 %define BS3_CMN_NM(Name) Name %+ _c64139 %define TMPL_UNDERSCORE _ 140 %define BS3_CMN_NM(Name) _ %+ Name %+ _c64 141 141 142 142 %else … … 361 361 %define TMPL_LM64 362 362 %define TMPL_MODE_STR "long, 64-bit" 363 %define TMPL_NM(Name) Name %+ _lm64363 %define TMPL_NM(Name) _ %+ Name %+ _lm64 364 364 %define TMPL_MODE_LNAME lm64 365 365 %define TMPL_MODE_UNAME LM64 … … 382 382 383 383 ; TMPL_NM version with uppercased suffix and no underscore separating them. 384 %ifnidn TMPL_UNDERSCORE,_ 385 %define TMPL_NM_U(Name) Name %+ TMPL_MODE_UNAME 386 %else 387 %define TMPL_NM_U(Name) TMPL_UNDERSCORE %+ Name %+ TMPL_MODE_UNAME 388 %endif 384 %define TMPL_NM_U(Name) TMPL_UNDERSCORE %+ Name %+ TMPL_MODE_UNAME 389 385 390 386 ; TMPL_FAR_NM … … 474 470 %endif 475 471 472 ;; @def TNOT64 473 ; Version of BNOT64 that follows the code template. 474 ; Like BNOT64 this normally goes in column 1. 475 %if TMPL_BITS == 64 476 %macro TNOT64 1+ 477 %endmacro 478 %else 479 %macro TNOT64 1+ 480 %1 481 %endmacro 482 %endif 483 476 484 ; 477 485 ; Default code segment (changes BITS too). -
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit.h
r60657 r60676 309 309 #define BS3_SEL_VMMDEV_MMIO16 0x00f8 /**< Selector for accessing the VMMDev MMIO segment at 0100000h from 16-bit code. */ 310 310 311 /** Checks if @a uSel is in the BS3_SEL_RX_XXX range. */ 312 #define BS3_SEL_IS_IN_RING_RANGE(uSel) ( (unsigned)(uSel - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT) ) 311 313 #define BS3_SEL_RING_SHIFT 8 /**< For the formula: BS3_SEL_R0_XXX + ((cs & 3) << BS3_SEL_RING_SHIFT) */ 312 314 #define BS3_SEL_RING_SUB_MASK 0x00f8 /**< Mask for getting the sub-selector. For use with BS3_SEL_R*_FIRST. */ 313 315 316 /** Checks if @a uSel is in the BS3_SEL_R0_XXX range. */ 317 #define BS3_SEL_IS_IN_R0_RANGE(uSel) ( (unsigned)(uSel - BS3_SEL_R0_FIRST) < (unsigned)(1 << BS3_SEL_RING_SHIFT) ) 314 318 #define BS3_SEL_R0_FIRST 0x0100 /**< The first selector in the ring-0 block. */ 315 319 #define BS3_SEL_R0_CS16 0x0100 /**< ring-0: 16-bit code selector, base 0x10000. */ … … 387 391 #define BS3_SEL_R3_CS64_CNF 0x0478 /**< ring-3: 64-bit conforming code selector, not accessed, flat. */ 388 392 #define BS3_SEL_R3_CS64_CNF_EO 0x0480 /**< ring-3: 64-bit execute-only conforming code selector, not accessed, flat. */ 393 394 #define BS3_SEL_R3_LAST 0x04f8 /**< ring-3: Last of the BS3_SEL_RX_XXX range. */ 389 395 390 396 #define BS3_SEL_SPARE_FIRST 0x0500 /**< The first selector in the spare block */ … … 680 686 * @remarks Mainly used in bs3kit-mangling.h, internal headers and templates. 681 687 */ 682 #if ARCH_BITS == 64683 # define BS3_DATA_NM(a_Name) RT_CONCAT(_,a_Name)684 #else688 //converter does this now//#if ARCH_BITS == 64 689 //converter does this now//# define BS3_DATA_NM(a_Name) RT_CONCAT(_,a_Name) 690 //converter does this now//#else 685 691 # define BS3_DATA_NM(a_Name) a_Name 686 #endif692 //converter does this now//#endif 687 693 688 694 /** … … 736 742 BS3_PTR_UNION_TEMPLATE(BS3CVPTRUNION, const volatile); 737 743 744 /** Generic far function type. */ 745 typedef BS3_DECL_FAR(void) FNBS3FAR(void); 746 /** Generic far function pointer type. */ 747 typedef FNBS3FAR *FPFNBS3FAR; 748 749 /** Generic near function type. */ 750 typedef BS3_DECL_NEAR(void) FNBS3NEAR(void); 751 /** Generic near function pointer type. */ 752 typedef FNBS3NEAR *PFNBS3NEAR; 753 754 /** Generic far 16:16 function pointer type for address conversion functions. */ 755 #if ARCH_BITS == 16 756 typedef FPFNBS3FAR PFNBS3FARADDRCONV; 757 #else 758 typedef uint32_t PFNBS3FARADDRCONV; 759 #endif 738 760 739 761 /** The system call vector. */ … … 1089 1111 1090 1112 1091 /** @def BS3_IS_PROTECTED_MODE1092 * @returns true if protected mode, false if not. 
*/1093 #if ARCH_BITS != 161094 # define BS3_IS_PROTECTED_MODE() (true)1095 #else1096 # if 11097 # define BS3_IS_PROTECTED_MODE() (!BS3_MODE_IS_RM_SYS(g_bBs3CurrentMode))1098 # else1099 # define BS3_IS_PROTECTED_MODE() (Bs3AsmSmsw() & 1 /*PE*/)1100 # endif1101 #endif1102 1103 1104 1105 1113 /** @defgroup bs3kit_cross_ptr Cross Context Pointer Type 1106 1114 * … … 1240 1248 DECLINLINE(__segment) Bs3Sel16HighFlatPtrToSelector(uint16_t uHigh) 1241 1249 { 1242 if ( BS3_IS_PROTECTED_MODE())1250 if (!BS3_MODE_IS_RM_OR_V86(g_bBs3CurrentMode)) 1243 1251 return (__segment)(((uHigh << 3) + BS3_SEL_TILED) | Bs3Sel16GetCurRing()); 1244 1252 return (__segment)(uHigh << 12); 1245 1253 } 1246 1254 1247 #endif /* ARCH_BITS == 16 */1255 #endif /* ARCH_BITS == 16 */ 1248 1256 1249 1257 /** @def BS3_XPTR_GET … … 1277 1285 do { \ 1278 1286 a_Type BS3_FAR *pTypeCheck = (a_pValue); \ 1279 if (BS3_IS_PROTECTED_MODE()) \ 1287 if (BS3_MODE_IS_RM_OR_V86(g_bBs3CurrentMode)) \ 1288 (a_Name).XPtr.uFlat = BS3_FP_OFF(pTypeCheck) + ((uint32_t)BS3_FP_SEG(pTypeCheck) << 4); \ 1289 else \ 1280 1290 { \ 1281 1291 (a_Name).XPtr.u.uLow = BS3_FP_OFF(pTypeCheck); \ 1282 1292 (a_Name).XPtr.u.uHigh = ((BS3_FP_SEG(pTypeCheck) & UINT16_C(0xfff8)) - BS3_SEL_TILED) >> 3; \ 1283 1293 } \ 1284 else \1285 (a_Name).XPtr.uFlat = BS3_FP_OFF(pTypeCheck) + ((uint32_t)BS3_FP_SEG(pTypeCheck) << 4); \1286 1294 } while (0) 1287 1295 #elif ARCH_BITS == 32 … … 1642 1650 BS3_CMN_PROTO_NOSB(void BS3_FAR *, Bs3MemChr,(void const BS3_FAR *pvHaystack, uint8_t bNeedle, size_t cbHaystack)); 1643 1651 1652 /** 1653 * CRT style memcmp. 1654 * 1655 * @returns 0 if equal. Negative if the left side is 'smaller' than the right 1656 * side, and positive in the other case. 1657 * @param pv1 The left hand memory. 1658 * @param pv2 The right hand memory. 1659 * @param bNeedle The number of bytes to compare. 1660 */ 1661 BS3_CMN_PROTO_NOSB(int, Bs3MemCmp,(void const BS3_FAR *pv1, void const BS3_FAR *pv2, size_t cb)); 1644 1662 1645 1663 BS3_CMN_PROTO_STUB(void, Bs3UInt64Div,(RTUINT64U uDividend, RTUINT64U uDivisor, RTUINT64U BS3_FAR *paQuotientReminder)); … … 1713 1731 */ 1714 1732 BS3_CMN_PROTO_NOSB(uint32_t, Bs3SelFlatCodeToProtFar16,(uint32_t uFlatAddr)); 1733 1734 /** 1735 * Converts a far 16:16 real mode (code) address to a flat address. 1736 * 1737 * @returns 32-bit flat address. 1738 * @param uFar1616 Far real mode address (high 16-bit is segment, low 1739 * is offset). 1740 * @remarks All register are preserved, except return. 1741 * @remarks No 20h scratch space required in 64-bit mode. 1742 * @remarks Exactly the same as Bs3SelRealModeDataToFlat, except for param. 1743 */ 1744 BS3_CMN_PROTO_FARSTUB(4, uint32_t, Bs3SelRealModeCodeToFlat,(PFNBS3FARADDRCONV uFar1616)); 1715 1745 1716 1746 /** … … 1983 2013 BS3_CMN_PROTO_STUB(void, Bs3MemFree,(void BS3_FAR *pv, size_t cb)); 1984 2014 2015 /** 2016 * Allocates a page with non-present pages on each side. 2017 * 2018 * @returns Pointer to the usable page. NULL on failure. Use 2019 * Bs3MemGuardedTestPageFree to free the allocation. 2020 * @param enmKind The kind of addressing constraints imposed on the 2021 * allocation. 2022 */ 2023 BS3_CMN_PROTO_STUB(void BS3_FAR *, Bs3MemGuardedTestPageAlloc,(BS3MEMKIND enmKind)); 2024 2025 /** 2026 * Allocates a page with pages on each side to the @a fPte specification. 2027 * 2028 * @returns Pointer to the usable page. NULL on failure. Use 2029 * Bs3MemGuardedTestPageFree to free the allocation. 
2030 * @param enmKind The kind of addressing constraints imposed on the 2031 * allocation. 2032 * @param fPte The page table entry specification for the guard pages. 2033 */ 2034 BS3_CMN_PROTO_STUB(void BS3_FAR *, Bs3MemGuardedTestPageAllocEx,(BS3MEMKIND enmKind, uint64_t fPte)); 2035 2036 /** 2037 * Frees guarded page allocated by Bs3MemGuardedTestPageAlloc or 2038 * Bs3MemGuardedTestPageAllocEx. 2039 * 2040 * @param pvGuardedPage Pointer returned by Bs3MemGuardedTestPageAlloc or 2041 * Bs3MemGuardedTestPageAllocEx. NULL is ignored. 2042 */ 2043 BS3_CMN_PROTO_STUB(void, Bs3MemGuardedTestPageFree,(void BS3_FAR *pvGuardedPage)); 2044 1985 2045 1986 2046 /** … … 2110 2170 BS3_CMN_PROTO_STUB(void, Bs3PicMaskAll,(void)); 2111 2171 2172 2173 /** 2174 * Call 16-bit prot mode function from v8086 mode. 2175 * 2176 * This switches from v8086 mode to 16-bit protected mode (code) and executed 2177 * @a fpfnCall with @a cbParams bytes of parameters pushed on the stack. 2178 * Afterwards it switches back to v8086 mode and returns a 16-bit status code. 2179 * 2180 * @returns 16-bit status code if the function returned anything. 2181 * @param fpfnCall Far real mode pointer to the function to call. 2182 * @param cbParams The size of the parameter list, in bytes. 2183 * @param ... The parameters. 2184 */ 2185 BS3_CMN_PROTO_STUB(int, Bs3SwitchFromV86To16BitAndCallC,(FPFNBS3FAR fpfnCall, unsigned cbParams, ...)); 2112 2186 2113 2187 … … 2264 2338 BS3_CMN_PROTO_STUB(void, Bs3RegCtxPrint,(PCBS3REGCTX pRegCtx)); 2265 2339 2340 /** 2341 * Sets a GPR and segment register to point at the same location as @a uFlat. 2342 * 2343 * @param pRegCtx The register context. 2344 * @param pGpr The general purpose register to set (points within 2345 * @a pRegCtx). 2346 * @param pSel The selector register (points within @a pRegCtx). 2347 * @param uFlat Flat location address. 2348 */ 2349 BS3_CMN_PROTO_STUB(void, Bs3RegCtxSetGrpSegFromFlat,(PBS3REGCTX pRegCtx, PBS3REG pGpr, PRTSEL pSel, RTCCUINTXREG uFlat)); 2350 2351 /** 2352 * Sets a GPR and segment register to point at the same location as @a ovPtr. 2353 * 2354 * @param pRegCtx The register context. 2355 * @param pGpr The general purpose register to set (points within 2356 * @a pRegCtx). 2357 * @param pSel The selector register (points within @a pRegCtx). 2358 * @param pvPtr Current context pointer. 2359 */ 2360 BS3_CMN_PROTO_STUB(void, Bs3RegCtxSetGrpSegFromCurPtr,(PBS3REGCTX pRegCtx, PBS3REG pGpr, PRTSEL pSel, void BS3_FAR *pvPtr)); 2361 2362 /** 2363 * Sets a GPR and DS to point at the same location as @a ovPtr. 2364 * 2365 * @param pRegCtx The register context. 2366 * @param pGpr The general purpose register to set (points within 2367 * @a pRegCtx). 2368 * @param pvPtr Current context pointer. 2369 */ 2370 BS3_CMN_PROTO_STUB(void, Bs3RegCtxSetGrpDsFromCurPtr,(PBS3REGCTX pRegCtx, PBS3REG pGpr, void BS3_FAR *pvPtr)); 2371 2372 /** 2373 * Sets CS:RIP to point at the same piece of code as @a pfnCode. 2374 * 2375 * The 16-bit edition of this function expects a far 16:16 address as written by 2376 * the linker (i.e. real mode). 2377 * 2378 * @param pRegCtx The register context. 2379 * @param pfnCode Pointer to the code. In 32-bit and 64-bit mode this is a 2380 * flat address, while in 16-bit it's a far 16:16 address 2381 * as fixed up by the linker (real mode selector). This 2382 * address is converted to match the mode of the context. 2383 */ 2384 BS3_CMN_PROTO_STUB(void, Bs3RegCtxSetRipCsFromLnkPtr,(PBS3REGCTX pRegCtx, FPFNBS3FAR pfnCode)); 2385 2266 2386 2267 2387 /** -
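Editor's note: the bs3kit.h additions above declare, among other things, Bs3MemCmp, the guarded-test-page allocator/free pair and the new register-context helpers. A hedged usage sketch of the memory-related ones; the test function itself is made up, Bs3MemCpy is assumed to have the usual (dst, src, cb) shape, and BS3MEMKIND_TILED is one of the address-constraint kinds used elsewhere in this changeset:

    #include "bs3kit.h"

    /* Hypothetical snippet: allocate a page surrounded by guard pages, mirror
       the first half into the second and verify the copy with Bs3MemCmp. */
    static int bs3TestGuardedCompare(void)
    {
        int              rc     = 1;
        uint8_t BS3_FAR *pbPage = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
        if (pbPage)
        {
            unsigned i;
            for (i = 0; i < PAGE_SIZE / 2; i++)
                pbPage[i] = (uint8_t)i;
            Bs3MemCpy(pbPage + PAGE_SIZE / 2, pbPage, PAGE_SIZE / 2);
            rc = Bs3MemCmp(pbPage, pbPage + PAGE_SIZE / 2, PAGE_SIZE / 2); /* 0 when equal */
            Bs3MemGuardedTestPageFree(pbPage);
        }
        return rc;
    }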
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3kit.mac
r60657 r60676 149 149 ;; Mostly internal macro. Follows BS3_SET_BITS. 150 150 %undef BS3_NAME_UNDERSCORE 151 %if %1 == 64 152 %define BS3_NAME_UNDERSCORE 153 %else 154 %define BS3_NAME_UNDERSCORE _ 155 %endif 151 %define BS3_NAME_UNDERSCORE _ 156 152 157 153 ;; For segment overrides and stuff. Follows BS3_SET_BITS. … … 302 298 ;; @{ 303 299 304 %if def ASM_FORMAT_OMF300 %ifndef ASM_FORMAT_BIN 305 301 ; !!HACK ALERT!! 306 302 ; … … 517 513 ;; When using watcom + OMF, we're using __cdecl by default, which 518 514 ; get an underscore added in front. 519 %ifdef ASM_FORMAT_OMF 520 %define NAME(name) _ %+ NAME_OVERLOAD(name) 521 %endif 515 %define NAME(name) _ %+ NAME_OVERLOAD(name) 522 516 523 517 … … 627 621 ; 628 622 %macro BS3_GLOBAL_NAME_EX 3 629 %ifdef ASM_FORMAT_ELF630 %ifdef __NASM__631 global %1:%2 %3632 %else633 global %1:%2634 %endif635 %else636 623 global %1 637 %endif638 624 %1: 639 625 %endmacro … … 656 642 ; 657 643 ; @param %1 The mangled name. 658 ; @param %2 Create an underscore prefixed alias if present and 659 ; BS3_PB_WITH_US_ALIAS (automatic in 64-bit code). 660 ; 661 %macro BS3_PROC_BEGIN 1-2 0 662 %if __BITS__ == 64 || ((%2) & BS3_PB_WITH_US_ALIAS) 663 BS3_GLOBAL_NAME_EX _ %+ %1, function, (%1 %+ _EndProc - %1) 664 %endif 644 ; 645 %macro BS3_PROC_BEGIN 1 665 646 BS3_GLOBAL_NAME_EX %1, function, (%1 %+ _EndProc - %1) 666 647 %endmacro 667 668 ;; For use with BS3_PROC_BEGIN for making it produce an underscore prefixed alias.669 %define BS3_PB_WITH_US_ALIAS 1670 648 671 649 ;; … … 678 656 %macro BS3_PROC_END 1 679 657 BS3_GLOBAL_NAME_EX %1 %+ _EndProc, function hidden, (%1 %+ _EndProc - %1) 680 %ifdef ASM_FORMAT_ELF681 %ifdef __YASM__682 size %1 %1 %+ _EndProc - %1683 size %1 %+ _EndProc 0684 %endif685 %endif686 658 int3 ; handy and avoids overlapping labels. 687 659 %endmacro … … 1073 1045 %define BS3_MODE_NM_071h(a_Name) _ %+ a_Name %+ _lm16 1074 1046 %define BS3_MODE_NM_072h(a_Name) _ %+ a_Name %+ _lm32 1075 %define BS3_MODE_NM_074h(a_Name) a_Name %+ _lm641047 %define BS3_MODE_NM_074h(a_Name) _ %+ a_Name %+ _lm64 1076 1048 1077 1049 %define BS3_MODE_LNAME_001h rm … … 1142 1114 %define BS3_MODE_UNDERSCORE_071h _ 1143 1115 %define BS3_MODE_UNDERSCORE_072h _ 1144 %define BS3_MODE_UNDERSCORE_074h 1116 %define BS3_MODE_UNDERSCORE_074h _ 1145 1117 1146 1118 %define BS3_MODE_CNAME_001h c16 … … 1191 1163 %define BS3_MODE_R0_NM_071h(a_Name) _ %+ a_Name %+ _lm16 1192 1164 %define BS3_MODE_R0_NM_072h(a_Name) _ %+ a_Name %+ _lm32 1193 %define BS3_MODE_R0_NM_074h(a_Name) a_Name %+ _lm641165 %define BS3_MODE_R0_NM_074h(a_Name) _ %+ a_Name %+ _lm64 1194 1166 ;; @} 1195 1167