VirtualBox

source: vbox/trunk/src/VBox/Runtime/common/alloc/heapoffset.cpp

Last change on this file was 100308, checked in by vboxsync, 11 months ago

Runtime: Replace occurrence of PAGE_SIZE with RTSystemgetPageSize() in code used by linux.arm64 guest additions, bugref:10476

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 33.8 KB
/* $Id: heapoffset.cpp 100308 2023-06-28 10:24:38Z vboxsync $ */
/** @file
 * IPRT - An Offset Based Heap.
 */

/*
 * Copyright (C) 2006-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
 * in the VirtualBox distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 *
 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
 */


/*********************************************************************************************************************************
*   Header Files                                                                                                                 *
*********************************************************************************************************************************/
#define LOG_GROUP RTLOGGROUP_DEFAULT
#include <iprt/heap.h>
#include "internal/iprt.h"

#include <iprt/assert.h>
#include <iprt/asm.h>
#include <iprt/errcore.h>
#include <iprt/log.h>
#include <iprt/param.h>
#include <iprt/string.h>

#include "internal/magics.h"


/*********************************************************************************************************************************
*   Structures and Typedefs                                                                                                      *
*********************************************************************************************************************************/
/** Pointer to the heap anchor block. */
typedef struct RTHEAPOFFSETINTERNAL *PRTHEAPOFFSETINTERNAL;
/** Pointer to a heap block. */
typedef struct RTHEAPOFFSETBLOCK *PRTHEAPOFFSETBLOCK;
/** Pointer to a free heap block. */
typedef struct RTHEAPOFFSETFREE *PRTHEAPOFFSETFREE;

/**
 * Structure describing a block in an offset based heap.
 *
 * If this block is allocated, it is followed by the user data.
 * If this block is free, see RTHEAPOFFSETFREE.
 */
typedef struct RTHEAPOFFSETBLOCK
{
    /** The next block in the global block list. */
    uint32_t /*PRTHEAPOFFSETBLOCK*/     offNext;
    /** The previous block in the global block list. */
    uint32_t /*PRTHEAPOFFSETBLOCK*/     offPrev;
    /** Offset into the heap of this block. Used to locate the anchor block. */
    uint32_t /*PRTHEAPOFFSETINTERNAL*/  offSelf;
    /** Flags + magic. */
    uint32_t                            fFlags;
} RTHEAPOFFSETBLOCK;
AssertCompileSize(RTHEAPOFFSETBLOCK, 16);

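/* Illustrative layout sketch (editor's illustration, not from the original file),
 * as implied by the structure above and by RTHeapOffsetAlloc() returning
 * pBlock + 1: an allocated block is a 16 byte RTHEAPOFFSETBLOCK header followed
 * directly by the user data, and the off* members are byte offsets from the
 * heap anchor block rather than pointers:
 *
 *      offSelf                offSelf + 16
 *      +---------------------+----------------------------+
 *      |  RTHEAPOFFSETBLOCK  |  user data (>= 16 bytes)    |
 *      +---------------------+----------------------------+
 */
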
/** The block is free if this flag is set. When cleared it's allocated. */
#define RTHEAPOFFSETBLOCK_FLAGS_FREE        (RT_BIT_32(0))
/** The magic value. */
#define RTHEAPOFFSETBLOCK_FLAGS_MAGIC       (UINT32_C(0xabcdef00))
/** The mask that needs to be applied to RTHEAPOFFSETBLOCK::fFlags to obtain the magic value. */
#define RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK  (~RT_BIT_32(0))

/**
 * Checks if the specified block is valid or not.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID(pBlock)  \
    ( ((pBlock)->fFlags & RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK) == RTHEAPOFFSETBLOCK_FLAGS_MAGIC )

/**
 * Checks if the specified block is valid and in use.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID_USED(pBlock) \
    (   ((pBlock)->fFlags & (RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK | RTHEAPOFFSETBLOCK_FLAGS_FREE)) \
     == RTHEAPOFFSETBLOCK_FLAGS_MAGIC )

/**
 * Checks if the specified block is valid and free.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID_FREE(pBlock) \
    (   ((pBlock)->fFlags & (RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK | RTHEAPOFFSETBLOCK_FLAGS_FREE)) \
     == (RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE) )

/**
 * Checks if the specified block is free or not.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a valid RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_FREE(pBlock)   (!!((pBlock)->fFlags & RTHEAPOFFSETBLOCK_FLAGS_FREE))

/**
 * A free heap block.
 * This is an extended version of RTHEAPOFFSETBLOCK that takes the unused
 * user data to store free list pointers and a cached size value.
 */
typedef struct RTHEAPOFFSETFREE
{
    /** Core stuff. */
    RTHEAPOFFSETBLOCK               Core;
    /** Pointer to the next free block. */
    uint32_t /*PRTHEAPOFFSETFREE*/  offNext;
    /** Pointer to the previous free block. */
    uint32_t /*PRTHEAPOFFSETFREE*/  offPrev;
    /** The size of the block (excluding the RTHEAPOFFSETBLOCK part). */
    uint32_t                        cb;
    /** An alignment filler to make it a multiple of 16 bytes. */
    uint32_t                        Alignment;
} RTHEAPOFFSETFREE;
AssertCompileSize(RTHEAPOFFSETFREE, 16+16);


/**
 * The heap anchor block.
 * This structure is placed at the head of the memory block specified to RTHeapOffsetInit(),
 * which means that the first RTHEAPOFFSETBLOCK appears immediately after this structure.
 */
typedef struct RTHEAPOFFSETINTERNAL
{
    /** The typical magic (RTHEAPOFFSET_MAGIC). */
    uint32_t                        u32Magic;
    /** The heap size. (This structure is included!) */
    uint32_t                        cbHeap;
    /** The amount of free memory in the heap. */
    uint32_t                        cbFree;
    /** Free head pointer. */
    uint32_t /*PRTHEAPOFFSETFREE*/  offFreeHead;
    /** Free tail pointer. */
    uint32_t /*PRTHEAPOFFSETFREE*/  offFreeTail;
    /** Make the size of this structure 32 bytes. */
    uint32_t                        au32Alignment[3];
} RTHEAPOFFSETINTERNAL;
AssertCompileSize(RTHEAPOFFSETINTERNAL, 32);

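/* Overall heap layout (editor's illustration, not part of the original source),
 * as implied by RTHeapOffsetInit() below.  The 32 byte anchor block sits at the
 * 32-byte-aligned start of the memory handed to the heap and is immediately
 * followed by the block chain, so the first block lives at offset 32 and its
 * user data at offset 48:
 *
 *   0                      32                  48
 *   +----------------------+-------------------+-----------+-------------------+--
 *   | RTHEAPOFFSETINTERNAL | RTHEAPOFFSETBLOCK | user data | RTHEAPOFFSETBLOCK | ..
 *   +----------------------+-------------------+-----------+-------------------+--
 *
 * cbHeap counts everything including the anchor, while cbFree is the sum of the
 * cb members of the free blocks, i.e. excluding the anchor and all block headers.
 */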

/** The minimum allocation size. */
#define RTHEAPOFFSET_MIN_BLOCK  (sizeof(RTHEAPOFFSETBLOCK))
AssertCompile(RTHEAPOFFSET_MIN_BLOCK >= sizeof(RTHEAPOFFSETBLOCK));
AssertCompile(RTHEAPOFFSET_MIN_BLOCK >= sizeof(RTHEAPOFFSETFREE) - sizeof(RTHEAPOFFSETBLOCK));

/** The minimum and default alignment. */
#define RTHEAPOFFSET_ALIGNMENT  (sizeof(RTHEAPOFFSETBLOCK))


/*********************************************************************************************************************************
*   Defined Constants And Macros                                                                                                 *
*********************************************************************************************************************************/
#ifdef RT_STRICT
# define RTHEAPOFFSET_STRICT 1
#endif

/**
 * Converts RTHEAPOFFSETBLOCK::offSelf into a heap anchor block pointer.
 *
 * @returns Pointer of given type.
 * @param   pBlock      The block to find the heap anchor block for.
 */
#define RTHEAPOFF_GET_ANCHOR(pBlock)    ( (PRTHEAPOFFSETINTERNAL)((uint8_t *)(pBlock) - (pBlock)->offSelf ) )


/**
 * Converts an offset to a pointer.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Pointer of given type.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   off         The offset to convert.
 * @param   type        The desired type.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_PTR_N(pHeapInt, off, type)    ( (type)rtHeapOffCheckedOffToPtr(pHeapInt, off, true /*fNull*/) )
#else
# define RTHEAPOFF_TO_PTR_N(pHeapInt, off, type)    ( (type)((off) ? (uint8_t *)(pHeapInt) + (off) : NULL) )
#endif

/**
 * Converts an offset to a pointer.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Pointer of given type.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   off         The offset to convert.
 * @param   type        The desired type.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_PTR(pHeapInt, off, type)      ( (type)rtHeapOffCheckedOffToPtr(pHeapInt, off, false /*fNull*/) )
#else
# define RTHEAPOFF_TO_PTR(pHeapInt, off, type)      ( (type)((uint8_t *)(pHeapInt) + (off)) )
#endif

/**
 * Converts a pointer to an offset.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Offset into the heap.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   ptr         The pointer to convert.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_OFF(pHeapInt, ptr)            rtHeapOffCheckedPtrToOff(pHeapInt, ptr)
#else
# define RTHEAPOFF_TO_OFF(pHeapInt, ptr)            ( (uint32_t)((ptr) ? (uintptr_t)(ptr) - (uintptr_t)(pHeapInt) : UINT32_C(0)) )
#endif

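/* Worked example (editor's illustration, not part of the original source):
 * with the heap anchor block at address 0x00010000 and a block at address
 * 0x00010050, RTHEAPOFF_TO_OFF(pHeapInt, pBlock) yields 0x50,
 * RTHEAPOFF_TO_PTR(pHeapInt, 0x50, PRTHEAPOFFSETBLOCK) yields 0x00010050
 * again, and because offSelf is also 0x50, RTHEAPOFF_GET_ANCHOR(pBlock)
 * subtracts it to recover 0x00010000.  Offset 0 is reserved to mean NULL,
 * which is safe because no block can ever start at offset 0 (the anchor
 * block occupies it).
 */
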
#define ASSERT_L(a, b)      AssertMsg((a) < (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_LE(a, b)     AssertMsg((a) <= (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_G(a, b)      AssertMsg((a) > (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_GE(a, b)     AssertMsg((a) >= (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_ALIGN(a)     AssertMsg(!((uintptr_t)(a) & (RTHEAPOFFSET_ALIGNMENT - 1)), ("a=%p\n", (uintptr_t)(a)))

#define ASSERT_PREV(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offPrev); \
         if ((pBlock)->offPrev) \
         { \
             ASSERT_L((pBlock)->offPrev, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
             ASSERT_GE((pBlock)->offPrev, sizeof(RTHEAPOFFSETINTERNAL)); \
         } \
         else \
             Assert((pBlock) == (PRTHEAPOFFSETBLOCK)((pHeapInt) + 1)); \
    } while (0)

#define ASSERT_NEXT(pHeap, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offNext); \
         if ((pBlock)->offNext) \
         { \
             ASSERT_L((pBlock)->offNext, (pHeapInt)->cbHeap); \
             ASSERT_G((pBlock)->offNext, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
         } \
    } while (0)

#define ASSERT_BLOCK(pHeapInt, pBlock) \
    do { AssertMsg(RTHEAPOFFSETBLOCK_IS_VALID(pBlock), ("%#x\n", (pBlock)->fFlags)); \
         AssertMsg(RTHEAPOFF_GET_ANCHOR(pBlock) == (pHeapInt), ("%p != %p\n", RTHEAPOFF_GET_ANCHOR(pBlock), (pHeapInt))); \
         ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), sizeof(RTHEAPOFFSETINTERNAL)); \
         ASSERT_L( RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->cbHeap); \
         ASSERT_NEXT(pHeapInt, pBlock); \
         ASSERT_PREV(pHeapInt, pBlock); \
    } while (0)

#define ASSERT_BLOCK_USED(pHeapInt, pBlock) \
    do { AssertMsg(RTHEAPOFFSETBLOCK_IS_VALID_USED((pBlock)), ("%#x\n", (pBlock)->fFlags)); \
         AssertMsg(RTHEAPOFF_GET_ANCHOR(pBlock) == (pHeapInt), ("%p != %p\n", RTHEAPOFF_GET_ANCHOR(pBlock), (pHeapInt))); \
         ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), sizeof(RTHEAPOFFSETINTERNAL)); \
         ASSERT_L( RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->cbHeap); \
         ASSERT_NEXT(pHeapInt, pBlock); \
         ASSERT_PREV(pHeapInt, pBlock); \
    } while (0)

#define ASSERT_FREE_PREV(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offPrev); \
         if ((pBlock)->offPrev) \
         { \
             ASSERT_GE((pBlock)->offPrev, (pHeapInt)->offFreeHead); \
             ASSERT_L((pBlock)->offPrev, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
             ASSERT_LE((pBlock)->offPrev, (pBlock)->Core.offPrev); \
         } \
         else \
             Assert((pBlock) == RTHEAPOFF_TO_PTR(pHeapInt, (pHeapInt)->offFreeHead, PRTHEAPOFFSETFREE) ); \
    } while (0)

#define ASSERT_FREE_NEXT(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offNext); \
         if ((pBlock)->offNext) \
         { \
             ASSERT_LE((pBlock)->offNext, (pHeapInt)->offFreeTail); \
             ASSERT_G((pBlock)->offNext, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
             ASSERT_GE((pBlock)->offNext, (pBlock)->Core.offNext); \
         } \
         else \
             Assert((pBlock) == RTHEAPOFF_TO_PTR(pHeapInt, (pHeapInt)->offFreeTail, PRTHEAPOFFSETFREE)); \
    } while (0)

#ifdef RTHEAPOFFSET_STRICT
# define ASSERT_FREE_CB(pHeapInt, pBlock) \
    do { size_t cbCalc = ((pBlock)->Core.offNext ? (pBlock)->Core.offNext : (pHeapInt)->cbHeap) \
                       - RTHEAPOFF_TO_OFF((pHeapInt), (pBlock)) - sizeof(RTHEAPOFFSETBLOCK); \
         AssertMsg((pBlock)->cb == cbCalc, ("cb=%#zx cbCalc=%#zx\n", (pBlock)->cb, cbCalc)); \
    } while (0)
#else
# define ASSERT_FREE_CB(pHeapInt, pBlock) do {} while (0)
#endif

/** Asserts that a free block is valid. */
#define ASSERT_BLOCK_FREE(pHeapInt, pBlock) \
    do { ASSERT_BLOCK(pHeapInt, &(pBlock)->Core); \
         Assert(RTHEAPOFFSETBLOCK_IS_VALID_FREE(&(pBlock)->Core)); \
         ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->offFreeHead); \
         ASSERT_LE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->offFreeTail); \
         ASSERT_FREE_NEXT(pHeapInt, pBlock); \
         ASSERT_FREE_PREV(pHeapInt, pBlock); \
         ASSERT_FREE_CB(pHeapInt, pBlock); \
    } while (0)

/** Asserts that the heap anchor block is ok. */
#define ASSERT_ANCHOR(pHeapInt) \
    do { AssertPtr(pHeapInt); \
         Assert((pHeapInt)->u32Magic == RTHEAPOFFSET_MAGIC); \
    } while (0)


/*********************************************************************************************************************************
*   Internal Functions                                                                                                           *
*********************************************************************************************************************************/
#ifdef RTHEAPOFFSET_STRICT
static void rtHeapOffsetAssertAll(PRTHEAPOFFSETINTERNAL pHeapInt);
#endif
static PRTHEAPOFFSETBLOCK rtHeapOffsetAllocBlock(PRTHEAPOFFSETINTERNAL pHeapInt, size_t cb, size_t uAlignment);
static void rtHeapOffsetFreeBlock(PRTHEAPOFFSETINTERNAL pHeapInt, PRTHEAPOFFSETBLOCK pBlock);

#ifdef RTHEAPOFFSET_STRICT

/** Checked version of RTHEAPOFF_TO_PTR and RTHEAPOFF_TO_PTR_N. */
static void *rtHeapOffCheckedOffToPtr(PRTHEAPOFFSETINTERNAL pHeapInt, uint32_t off, bool fNull)
{
    Assert(off || fNull);
    if (!off)
        return NULL;
    AssertMsg(off < pHeapInt->cbHeap, ("%#x %#x\n", off, pHeapInt->cbHeap));
    AssertMsg(off >= sizeof(*pHeapInt), ("%#x %#x\n", off, sizeof(*pHeapInt)));
    return (uint8_t *)pHeapInt + off;
}

/** Checked version of RTHEAPOFF_TO_OFF. */
static uint32_t rtHeapOffCheckedPtrToOff(PRTHEAPOFFSETINTERNAL pHeapInt, void *pv)
{
    if (!pv)
        return 0;
    uintptr_t off = (uintptr_t)pv - (uintptr_t)pHeapInt;
    AssertMsg(off < pHeapInt->cbHeap, ("%#x %#x\n", off, pHeapInt->cbHeap));
    AssertMsg(off >= sizeof(*pHeapInt), ("%#x %#x\n", off, sizeof(*pHeapInt)));
    return (uint32_t)off;
}

#endif /* RTHEAPOFFSET_STRICT */



RTDECL(int) RTHeapOffsetInit(PRTHEAPOFFSET phHeap, void *pvMemory, size_t cbMemory)
{
    PRTHEAPOFFSETINTERNAL pHeapInt;
    PRTHEAPOFFSETFREE pFree;
    unsigned i;

    /*
     * Validate input. The imposed minimum heap size is just a convenient value.
     */
    AssertReturn(cbMemory >= _4K, VERR_INVALID_PARAMETER);
    AssertReturn(cbMemory < UINT32_MAX, VERR_INVALID_PARAMETER);
    AssertPtrReturn(pvMemory, VERR_INVALID_POINTER);
    AssertReturn((uintptr_t)pvMemory + (cbMemory - 1) > (uintptr_t)cbMemory, VERR_INVALID_PARAMETER);

    /*
     * Place the heap anchor block at the start of the heap memory,
     * enforce 32 byte alignment of it. Also align the heap size correctly.
     */
    pHeapInt = (PRTHEAPOFFSETINTERNAL)pvMemory;
    if ((uintptr_t)pvMemory & 31)
    {
        const uintptr_t off = 32 - ((uintptr_t)pvMemory & 31);
        cbMemory -= off;
        pHeapInt = (PRTHEAPOFFSETINTERNAL)((uintptr_t)pvMemory + off);
    }
    cbMemory &= ~(RTHEAPOFFSET_ALIGNMENT - 1);


    /* Init the heap anchor block. */
    pHeapInt->u32Magic = RTHEAPOFFSET_MAGIC;
    pHeapInt->cbHeap = (uint32_t)cbMemory;
    pHeapInt->cbFree = (uint32_t)cbMemory
                     - sizeof(RTHEAPOFFSETBLOCK)
                     - sizeof(RTHEAPOFFSETINTERNAL);
    pHeapInt->offFreeTail = pHeapInt->offFreeHead = sizeof(*pHeapInt);
    for (i = 0; i < RT_ELEMENTS(pHeapInt->au32Alignment); i++)
        pHeapInt->au32Alignment[i] = UINT32_MAX;

    /* Init the single free block. */
    pFree = RTHEAPOFF_TO_PTR(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE);
    pFree->Core.offNext = 0;
    pFree->Core.offPrev = 0;
    pFree->Core.offSelf = pHeapInt->offFreeHead;
    pFree->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;
    pFree->offNext = 0;
    pFree->offPrev = 0;
    pFree->cb = pHeapInt->cbFree;

    *phHeap = pHeapInt;

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif
    return VINF_SUCCESS;
}
RT_EXPORT_SYMBOL(RTHeapOffsetInit);


RTDECL(void *) RTHeapOffsetAlloc(RTHEAPOFFSET hHeap, size_t cb, size_t cbAlignment)
{
    PRTHEAPOFFSETINTERNAL pHeapInt = hHeap;
    PRTHEAPOFFSETBLOCK pBlock;

    /*
     * Validate and adjust the input.
     */
    AssertPtrReturn(pHeapInt, NULL);
    if (cb < RTHEAPOFFSET_MIN_BLOCK)
        cb = RTHEAPOFFSET_MIN_BLOCK;
    else
        cb = RT_ALIGN_Z(cb, RTHEAPOFFSET_ALIGNMENT);
    if (!cbAlignment)
        cbAlignment = RTHEAPOFFSET_ALIGNMENT;
    else
    {
        Assert(!(cbAlignment & (cbAlignment - 1)));
        Assert((cbAlignment & ~(cbAlignment - 1)) == cbAlignment);
        if (cbAlignment < RTHEAPOFFSET_ALIGNMENT)
            cbAlignment = RTHEAPOFFSET_ALIGNMENT;
    }

    /*
     * Do the allocation.
     */
    pBlock = rtHeapOffsetAllocBlock(pHeapInt, cb, cbAlignment);
    if (RT_LIKELY(pBlock))
    {
        void *pv = pBlock + 1;
        return pv;
    }
    return NULL;
}
RT_EXPORT_SYMBOL(RTHeapOffsetAlloc);


RTDECL(void *) RTHeapOffsetAllocZ(RTHEAPOFFSET hHeap, size_t cb, size_t cbAlignment)
{
    PRTHEAPOFFSETINTERNAL pHeapInt = hHeap;
    PRTHEAPOFFSETBLOCK pBlock;

    /*
     * Validate and adjust the input.
     */
    AssertPtrReturn(pHeapInt, NULL);
    if (cb < RTHEAPOFFSET_MIN_BLOCK)
        cb = RTHEAPOFFSET_MIN_BLOCK;
    else
        cb = RT_ALIGN_Z(cb, RTHEAPOFFSET_ALIGNMENT);
    if (!cbAlignment)
        cbAlignment = RTHEAPOFFSET_ALIGNMENT;
    else
    {
        Assert(!(cbAlignment & (cbAlignment - 1)));
        Assert((cbAlignment & ~(cbAlignment - 1)) == cbAlignment);
        if (cbAlignment < RTHEAPOFFSET_ALIGNMENT)
            cbAlignment = RTHEAPOFFSET_ALIGNMENT;
    }

    /*
     * Do the allocation.
     */
    pBlock = rtHeapOffsetAllocBlock(pHeapInt, cb, cbAlignment);
    if (RT_LIKELY(pBlock))
    {
        void *pv = pBlock + 1;
        memset(pv, 0, cb);
        return pv;
    }
    return NULL;
}
RT_EXPORT_SYMBOL(RTHeapOffsetAllocZ);


/**
 * Allocates a block of memory from the specified heap.
 *
 * No parameter validation or adjustment is performed.
 *
 * @returns Pointer to the allocated block.
 * @returns NULL on failure.
 *
 * @param   pHeapInt    The heap.
 * @param   cb          Size of the memory block to allocate.
 * @param   uAlignment  The alignment specifications for the allocated block.
 */
static PRTHEAPOFFSETBLOCK rtHeapOffsetAllocBlock(PRTHEAPOFFSETINTERNAL pHeapInt, size_t cb, size_t uAlignment)
{
    PRTHEAPOFFSETBLOCK pRet = NULL;
    PRTHEAPOFFSETFREE pFree;

    AssertReturn((pHeapInt)->u32Magic == RTHEAPOFFSET_MAGIC, NULL);
#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif

    /*
     * Search for a fitting block from the lower end of the heap.
     */
    for (pFree = RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE);
         pFree;
         pFree = RTHEAPOFF_TO_PTR_N(pHeapInt, pFree->offNext, PRTHEAPOFFSETFREE))
    {
        uintptr_t offAlign;
        ASSERT_BLOCK_FREE(pHeapInt, pFree);

        /*
         * Match for size and alignment.
         */
        if (pFree->cb < cb)
            continue;
        offAlign = (uintptr_t)(&pFree->Core + 1) & (uAlignment - 1);
        if (offAlign)
        {
            PRTHEAPOFFSETFREE pPrev;

            offAlign = (uintptr_t)(&pFree[1].Core + 1) & (uAlignment - 1);
            offAlign = uAlignment - offAlign;
            if (pFree->cb < cb + offAlign + sizeof(RTHEAPOFFSETFREE))
                continue;

            /*
             * Split up the free block into two, so that the 2nd is aligned as
             * per specification.
             */
            pPrev = pFree;
            pFree = (PRTHEAPOFFSETFREE)((uintptr_t)(pFree + 1) + offAlign);
            pFree->Core.offPrev = pPrev->Core.offSelf;
            pFree->Core.offNext = pPrev->Core.offNext;
            pFree->Core.offSelf = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            pFree->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;
            pFree->offPrev = pPrev->Core.offSelf;
            pFree->offNext = pPrev->offNext;
            pFree->cb = (pFree->Core.offNext ? pFree->Core.offNext : pHeapInt->cbHeap)
                      - pFree->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);

            pPrev->Core.offNext = pFree->Core.offSelf;
            pPrev->offNext = pFree->Core.offSelf;
            pPrev->cb = pFree->Core.offSelf - pPrev->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);

            if (pFree->Core.offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = pFree->Core.offSelf;
            if (pFree->offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETFREE)->offPrev = pFree->Core.offSelf;
            else
                pHeapInt->offFreeTail = pFree->Core.offSelf;

            pHeapInt->cbFree -= sizeof(RTHEAPOFFSETBLOCK);
            ASSERT_BLOCK_FREE(pHeapInt, pPrev);
            ASSERT_BLOCK_FREE(pHeapInt, pFree);
        }

        /*
         * Split off a new FREE block?
         */
        if (pFree->cb >= cb + RT_ALIGN_Z(sizeof(RTHEAPOFFSETFREE), RTHEAPOFFSET_ALIGNMENT))
        {
            /*
             * Create a new FREE block at the end of this one.
             */
            PRTHEAPOFFSETFREE pNew = (PRTHEAPOFFSETFREE)((uintptr_t)&pFree->Core + cb + sizeof(RTHEAPOFFSETBLOCK));

            pNew->Core.offSelf = RTHEAPOFF_TO_OFF(pHeapInt, pNew);
            pNew->Core.offNext = pFree->Core.offNext;
            if (pFree->Core.offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = pNew->Core.offSelf;
            pNew->Core.offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            pNew->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;

            pNew->offNext = pFree->offNext;
            if (pNew->offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pNew->offNext, PRTHEAPOFFSETFREE)->offPrev = pNew->Core.offSelf;
            else
                pHeapInt->offFreeTail = pNew->Core.offSelf;
            pNew->offPrev = pFree->offPrev;
            if (pNew->offPrev)
                RTHEAPOFF_TO_PTR(pHeapInt, pNew->offPrev, PRTHEAPOFFSETFREE)->offNext = pNew->Core.offSelf;
            else
                pHeapInt->offFreeHead = pNew->Core.offSelf;
            pNew->cb = (pNew->Core.offNext ? pNew->Core.offNext : pHeapInt->cbHeap) \
                     - pNew->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);
            ASSERT_BLOCK_FREE(pHeapInt, pNew);

            /*
             * Adjust and convert the old FREE node into a USED node.
             */
            pFree->Core.fFlags &= ~RTHEAPOFFSETBLOCK_FLAGS_FREE;
            pFree->Core.offNext = pNew->Core.offSelf;
            pHeapInt->cbFree -= pFree->cb;
            pHeapInt->cbFree += pNew->cb;
            pRet = &pFree->Core;
            ASSERT_BLOCK_USED(pHeapInt, pRet);
        }
        else
        {
            /*
             * Link it out of the free list.
             */
            if (pFree->offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->offNext, PRTHEAPOFFSETFREE)->offPrev = pFree->offPrev;
            else
                pHeapInt->offFreeTail = pFree->offPrev;
            if (pFree->offPrev)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->offPrev, PRTHEAPOFFSETFREE)->offNext = pFree->offNext;
            else
                pHeapInt->offFreeHead = pFree->offNext;

            /*
             * Convert it to a used block.
             */
            pHeapInt->cbFree -= pFree->cb;
            pFree->Core.fFlags &= ~RTHEAPOFFSETBLOCK_FLAGS_FREE;
            pRet = &pFree->Core;
            ASSERT_BLOCK_USED(pHeapInt, pRet);
        }
        break;
    }

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif
    return pRet;
}


RTDECL(void) RTHeapOffsetFree(RTHEAPOFFSET hHeap, void *pv)
{
    PRTHEAPOFFSETINTERNAL pHeapInt;
    PRTHEAPOFFSETBLOCK pBlock;

    /*
     * Validate input.
     */
    if (!pv)
        return;
    AssertPtr(pv);
    Assert(RT_ALIGN_P(pv, RTHEAPOFFSET_ALIGNMENT) == pv);

    /*
     * Get the block and heap. If in strict mode, validate these.
     */
    pBlock = (PRTHEAPOFFSETBLOCK)pv - 1;
    pHeapInt = RTHEAPOFF_GET_ANCHOR(pBlock);
    ASSERT_BLOCK_USED(pHeapInt, pBlock);
    ASSERT_ANCHOR(pHeapInt);
    Assert(pHeapInt == (PRTHEAPOFFSETINTERNAL)hHeap || !hHeap); RT_NOREF_PV(hHeap);

#ifdef RTHEAPOFFSET_FREE_POISON
    /*
     * Poison the block.
     */
    const size_t cbBlock = (pBlock->offNext ? pBlock->offNext : pHeapInt->cbHeap)
                         - RTHEAPOFF_TO_OFF(pHeapInt, pBlock) - sizeof(RTHEAPOFFSETBLOCK);
    memset(pBlock + 1, RTHEAPOFFSET_FREE_POISON, cbBlock);
#endif

    /*
     * Call worker which does the actual job.
     */
    rtHeapOffsetFreeBlock(pHeapInt, pBlock);
}
RT_EXPORT_SYMBOL(RTHeapOffsetFree);


/**
 * Free a memory block.
 *
 * @param   pHeapInt    The heap.
 * @param   pBlock      The memory block to free.
 */
static void rtHeapOffsetFreeBlock(PRTHEAPOFFSETINTERNAL pHeapInt, PRTHEAPOFFSETBLOCK pBlock)
{
    PRTHEAPOFFSETFREE pFree = (PRTHEAPOFFSETFREE)pBlock;
    PRTHEAPOFFSETFREE pLeft;
    PRTHEAPOFFSETFREE pRight;

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif

    /*
     * Look for the closest free list blocks by walking the blocks right
     * of us (both lists are sorted by address).
     */
    pLeft = NULL;
    pRight = NULL;
    if (pHeapInt->offFreeTail)
    {
        pRight = RTHEAPOFF_TO_PTR_N(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETFREE);
        while (pRight && !RTHEAPOFFSETBLOCK_IS_FREE(&pRight->Core))
        {
            ASSERT_BLOCK(pHeapInt, &pRight->Core);
            pRight = RTHEAPOFF_TO_PTR_N(pHeapInt, pRight->Core.offNext, PRTHEAPOFFSETFREE);
        }
        if (!pRight)
            pLeft = RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeTail, PRTHEAPOFFSETFREE);
        else
        {
            ASSERT_BLOCK_FREE(pHeapInt, pRight);
            pLeft = RTHEAPOFF_TO_PTR_N(pHeapInt, pRight->offPrev, PRTHEAPOFFSETFREE);
        }
        if (pLeft)
            ASSERT_BLOCK_FREE(pHeapInt, pLeft);
    }
    AssertMsgReturnVoid(pLeft != pFree, ("Freed twice! pv=%p (pBlock=%p)\n", pBlock + 1, pBlock));
    ASSERT_L(RTHEAPOFF_TO_OFF(pHeapInt, pLeft), RTHEAPOFF_TO_OFF(pHeapInt, pFree));
    Assert(!pRight || (uintptr_t)pRight > (uintptr_t)pFree);
    Assert(!pLeft || RTHEAPOFF_TO_PTR_N(pHeapInt, pLeft->offNext, PRTHEAPOFFSETFREE) == pRight);

    /*
     * Insert at the head of the free block list?
     */
    if (!pLeft)
    {
        Assert(pRight == RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE));
        pFree->Core.fFlags |= RTHEAPOFFSETBLOCK_FLAGS_FREE;
        pFree->offPrev = 0;
        pFree->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pRight);
        if (pRight)
            pRight->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        else
            pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        pHeapInt->offFreeHead = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
    }
    else
    {
        /*
         * Can we merge with left hand free block?
         */
        if (pLeft->Core.offNext == RTHEAPOFF_TO_OFF(pHeapInt, pFree))
        {
            pLeft->Core.offNext = pFree->Core.offNext;
            if (pFree->Core.offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pLeft);
            pHeapInt->cbFree -= pLeft->cb;
            pFree = pLeft;
        }
        /*
         * No, just link it into the free list then.
         */
        else
        {
            pFree->Core.fFlags |= RTHEAPOFFSETBLOCK_FLAGS_FREE;
            pFree->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pRight);
            pFree->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pLeft);
            pLeft->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            if (pRight)
                pRight->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            else
                pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        }
    }

    /*
     * Can we merge with right hand free block?
     */
    if (   pRight
        && pRight->Core.offPrev == RTHEAPOFF_TO_OFF(pHeapInt, pFree))
    {
        /* core */
        pFree->Core.offNext = pRight->Core.offNext;
        if (pRight->Core.offNext)
            RTHEAPOFF_TO_PTR(pHeapInt, pRight->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);

        /* free */
        pFree->offNext = pRight->offNext;
        if (pRight->offNext)
            RTHEAPOFF_TO_PTR(pHeapInt, pRight->offNext, PRTHEAPOFFSETFREE)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        else
            pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        pHeapInt->cbFree -= pRight->cb;
    }

    /*
     * Calculate the size and update free stats.
     */
    pFree->cb = (pFree->Core.offNext ? pFree->Core.offNext : pHeapInt->cbHeap)
              - RTHEAPOFF_TO_OFF(pHeapInt, pFree) - sizeof(RTHEAPOFFSETBLOCK);
    pHeapInt->cbFree += pFree->cb;
    ASSERT_BLOCK_FREE(pHeapInt, pFree);

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif
}


#ifdef RTHEAPOFFSET_STRICT
/**
 * Internal consistency check (relying on assertions).
 * @param   pHeapInt
 */
static void rtHeapOffsetAssertAll(PRTHEAPOFFSETINTERNAL pHeapInt)
{
    PRTHEAPOFFSETFREE pPrev = NULL;
    PRTHEAPOFFSETFREE pPrevFree = NULL;
    PRTHEAPOFFSETFREE pBlock;
    for (pBlock = (PRTHEAPOFFSETFREE)(pHeapInt + 1);
         pBlock;
         pBlock = RTHEAPOFF_TO_PTR_N(pHeapInt, pBlock->Core.offNext, PRTHEAPOFFSETFREE))
    {
        if (RTHEAPOFFSETBLOCK_IS_FREE(&pBlock->Core))
        {
            ASSERT_BLOCK_FREE(pHeapInt, pBlock);
            Assert(pBlock->offPrev == RTHEAPOFF_TO_OFF(pHeapInt, pPrevFree));
            Assert(pPrevFree || pHeapInt->offFreeHead == RTHEAPOFF_TO_OFF(pHeapInt, pBlock));
            pPrevFree = pBlock;
        }
        else
            ASSERT_BLOCK_USED(pHeapInt, &pBlock->Core);
        Assert(!pPrev || RTHEAPOFF_TO_OFF(pHeapInt, pPrev) == pBlock->Core.offPrev);
        pPrev = pBlock;
    }
    Assert(pHeapInt->offFreeTail == RTHEAPOFF_TO_OFF(pHeapInt, pPrevFree));
}
#endif


RTDECL(size_t) RTHeapOffsetSize(RTHEAPOFFSET hHeap, void *pv)
{
    PRTHEAPOFFSETINTERNAL pHeapInt;
    PRTHEAPOFFSETBLOCK pBlock;
    size_t cbBlock;

    /*
     * Validate input.
     */
    if (!pv)
        return 0;
    AssertPtrReturn(pv, 0);
    AssertReturn(RT_ALIGN_P(pv, RTHEAPOFFSET_ALIGNMENT) == pv, 0);

    /*
     * Get the block and heap. If in strict mode, validate these.
     */
    pBlock = (PRTHEAPOFFSETBLOCK)pv - 1;
    pHeapInt = RTHEAPOFF_GET_ANCHOR(pBlock);
    ASSERT_BLOCK_USED(pHeapInt, pBlock);
    ASSERT_ANCHOR(pHeapInt);
    Assert(pHeapInt == (PRTHEAPOFFSETINTERNAL)hHeap || !hHeap); RT_NOREF_PV(hHeap);

    /*
     * Calculate the block size.
     */
    cbBlock = (pBlock->offNext ? pBlock->offNext : pHeapInt->cbHeap)
            - RTHEAPOFF_TO_OFF(pHeapInt, pBlock) - sizeof(RTHEAPOFFSETBLOCK);
    return cbBlock;
}
RT_EXPORT_SYMBOL(RTHeapOffsetSize);
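
/* Note (editor's illustration, not part of the original source): the value
 * returned above is the whole block payload.  RTHeapOffsetAlloc() rounds
 * requests up to at least RTHEAPOFFSET_MIN_BLOCK (16 bytes) and to a multiple
 * of RTHEAPOFFSET_ALIGNMENT, and rtHeapOffsetAllocBlock() may hand out an even
 * larger payload when the tail of a free block was too small to split off, so
 * RTHeapOffsetSize() can legitimately report more than was requested, e.g. 16
 * for a 1 byte allocation. */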


RTDECL(size_t) RTHeapOffsetGetHeapSize(RTHEAPOFFSET hHeap)
{
    PRTHEAPOFFSETINTERNAL pHeapInt;

    if (hHeap == NIL_RTHEAPOFFSET)
        return 0;

    pHeapInt = hHeap;
    AssertPtrReturn(pHeapInt, 0);
    ASSERT_ANCHOR(pHeapInt);
    return pHeapInt->cbHeap;
}
RT_EXPORT_SYMBOL(RTHeapOffsetGetHeapSize);


RTDECL(size_t) RTHeapOffsetGetFreeSize(RTHEAPOFFSET hHeap)
{
    PRTHEAPOFFSETINTERNAL pHeapInt;

    if (hHeap == NIL_RTHEAPOFFSET)
        return 0;

    pHeapInt = hHeap;
    AssertPtrReturn(pHeapInt, 0);
    ASSERT_ANCHOR(pHeapInt);
    return pHeapInt->cbFree;
}
RT_EXPORT_SYMBOL(RTHeapOffsetGetFreeSize);


RTDECL(void) RTHeapOffsetDump(RTHEAPOFFSET hHeap, PFNRTHEAPOFFSETPRINTF pfnPrintf)
{
    PRTHEAPOFFSETINTERNAL pHeapInt = (PRTHEAPOFFSETINTERNAL)hHeap;
    PRTHEAPOFFSETFREE pBlock;

    pfnPrintf("**** Dumping Heap %p - cbHeap=%x cbFree=%x ****\n",
              hHeap, pHeapInt->cbHeap, pHeapInt->cbFree);

    for (pBlock = (PRTHEAPOFFSETFREE)(pHeapInt + 1);
         pBlock;
         pBlock = RTHEAPOFF_TO_PTR_N(pHeapInt, pBlock->Core.offNext, PRTHEAPOFFSETFREE))
    {
        size_t cb = (pBlock->offNext ? pBlock->Core.offNext : pHeapInt->cbHeap)
                  - RTHEAPOFF_TO_OFF(pHeapInt, pBlock) - sizeof(RTHEAPOFFSETBLOCK);
        if (RTHEAPOFFSETBLOCK_IS_FREE(&pBlock->Core))
            pfnPrintf("%p %06x FREE offNext=%06x offPrev=%06x fFlags=%#x cb=%#06x : cb=%#06x offNext=%06x offPrev=%06x\n",
                      pBlock, pBlock->Core.offSelf, pBlock->Core.offNext, pBlock->Core.offPrev, pBlock->Core.fFlags, cb,
                      pBlock->cb, pBlock->offNext, pBlock->offPrev);
        else
            pfnPrintf("%p %06x USED offNext=%06x offPrev=%06x fFlags=%#x cb=%#06x\n",
                      pBlock, pBlock->Core.offSelf, pBlock->Core.offNext, pBlock->Core.offPrev, pBlock->Core.fFlags, cb);
    }
    pfnPrintf("**** Done dumping Heap %p ****\n", hHeap);
}
RT_EXPORT_SYMBOL(RTHeapOffsetDump);
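
A minimal usage sketch of the API implemented above (not part of heapoffset.cpp; the function and buffer names are illustrative). It backs the heap with a static 64 KiB buffer, since RTHeapOffsetInit() insists on at least 4 KiB and the heap never grows beyond the memory it is given:

#include <iprt/heap.h>
#include <iprt/errcore.h>
#include <iprt/assert.h>

/* Backing storage; the anchor block is placed (32 byte aligned) inside it. */
static uint8_t g_abHeapMem[_64K];

static int exampleHeapUse(void)
{
    RTHEAPOFFSET hHeap = NIL_RTHEAPOFFSET;
    int rc = RTHeapOffsetInit(&hHeap, g_abHeapMem, sizeof(g_abHeapMem));
    if (RT_SUCCESS(rc))
    {
        /* cbAlignment=0 selects the default 16 byte alignment; the second
           call requests 64 byte alignment and zeroed memory. */
        void *pv1 = RTHeapOffsetAlloc(hHeap, 100, 0);
        void *pv2 = RTHeapOffsetAllocZ(hHeap, 256, 64);
        if (pv1 && pv2)
        {
            Assert(RTHeapOffsetSize(hHeap, pv1) >= 100);  /* rounded up, see RTHeapOffsetSize(). */
            Assert(RTHeapOffsetGetFreeSize(hHeap) < RTHeapOffsetGetHeapSize(hHeap));
        }
        /* Freeing NULL is harmless, so no need to check the pointers here. */
        RTHeapOffsetFree(hHeap, pv2);
        RTHeapOffsetFree(hHeap, pv1);
    }
    return rc;
}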