 * Copyright 2003-2013, Axel Dörfler, axeld@pinc-software.de.
 * Distributed under the terms of the MIT License.
#include <boot/platform.h>

#include <string.h>

#include <algorithm>
#include <new>

#include <util/OpenHashTable.h>
#include <util/SplayTree.h>
// Tracing: when TRACE_HEAP is defined, TRACE() forwards to dprintf();
// otherwise it compiles away to a no-op statement.
// NOTE(review): the #ifdef scaffold was lost in the garbled extraction and
// has been reconstructed — verify the guard macro name against upstream.
#ifdef TRACE_HEAP
#	define TRACE(format...)	dprintf(format)
#else
#	define TRACE(format...)	do { } while (false)
#endif
/*!	This is a very simple malloc()/free() implementation - it only
	manages a free list.
	After heap_init() is called, all free memory is contained in one
	big chunk, the only entry in the free link list (which is a single
	linked list).
	When memory is allocated, the smallest free chunk that contains
	the requested size is split (or taken as a whole if it can't be
	split anymore), and its lower half will be removed from the
	free list.
	The free list is ordered by size, starting with the smallest
	free chunk available. When a chunk is freed, it will be joined
	with its predecessor or successor, if possible.
	To ease list handling, the list anchor itself is a free chunk with
	size 0 that can't be allocated.
*/
#define DEBUG_ALLOCATIONS
	// if defined, freed memory is filled with 0xcc
#define DEBUG_MAX_HEAP_USAGE
	// if defined, the maximum heap usage is determined and printed before
	// entering the kernel
const static size_t kAlignment = 8;
	// all memory chunks will be a multiple of this

const static size_t kLargeAllocationThreshold = 16 * 1024;
	// allocations of this size or larger are allocated via
	// platform_allocate_region()
62 size_t CompleteSize() const
70 char fAlignment
[kAlignment
];
78 struct FreeChunkData
: SplayTreeLink
<FreeChunk
> {
80 FreeChunk
* Next() const
85 FreeChunk
** NextLink()
95 class FreeChunk
: public Chunk
, public FreeChunkData
{
97 void SetTo(size_t size
);
101 FreeChunk
* Split(size_t splitSize
);
102 bool IsTouching(FreeChunk
* link
);
103 FreeChunk
* Join(FreeChunk
* link
);
105 void* AllocatedAddress() const;
106 static FreeChunk
* SetToAllocated(void* allocated
);
110 struct FreeChunkKey
{
111 FreeChunkKey(size_t size
)
118 FreeChunkKey(const FreeChunk
* chunk
)
120 fSize(chunk
->Size()),
125 int Compare(const FreeChunk
* chunk
) const
127 size_t chunkSize
= chunk
->Size();
128 if (chunkSize
!= fSize
)
129 return fSize
< chunkSize
? -1 : 1;
133 return fChunk
< chunk
? -1 : 1;
138 const FreeChunk
* fChunk
;
142 struct FreeChunkTreeDefinition
{
143 typedef FreeChunkKey KeyType
;
144 typedef FreeChunk NodeType
;
146 static FreeChunkKey
GetKey(const FreeChunk
* node
)
148 return FreeChunkKey(node
);
151 static SplayTreeLink
<FreeChunk
>* GetLink(FreeChunk
* node
)
156 static int Compare(const FreeChunkKey
& key
, const FreeChunk
* node
)
158 return key
.Compare(node
);
161 static FreeChunk
** GetListLink(FreeChunk
* node
)
163 return node
->NextLink();
168 typedef IteratableSplayTree
<FreeChunkTreeDefinition
> FreeChunkTree
;
171 struct LargeAllocation
{
176 void SetTo(void* address
, size_t size
)
182 status_t
Allocate(size_t size
)
185 return platform_allocate_region(&fAddress
, fSize
,
186 B_READ_AREA
| B_WRITE_AREA
, false);
191 platform_free_region(fAddress
, fSize
);
194 void* Address() const
204 LargeAllocation
*& HashNext()
212 LargeAllocation
* fHashNext
;
216 struct LargeAllocationHashDefinition
{
217 typedef void* KeyType
;
218 typedef LargeAllocation ValueType
;
220 size_t HashKey(void* key
) const
222 return size_t(key
) / kAlignment
;
225 size_t Hash(LargeAllocation
* value
) const
227 return HashKey(value
->Address());
230 bool Compare(void* key
, LargeAllocation
* value
) const
232 return value
->Address() == key
;
235 LargeAllocation
*& GetLink(LargeAllocation
* value
) const
237 return value
->HashNext();
242 typedef BOpenHashTable
<LargeAllocationHashDefinition
> LargeAllocationHashTable
;
245 static void* sHeapBase
;
246 static void* sHeapEnd
;
247 static size_t sMaxHeapSize
, sAvailable
, sMaxHeapUsage
;
248 static FreeChunkTree sFreeChunkTree
;
250 static LargeAllocationHashTable sLargeAllocations
;
256 return (size
+ kAlignment
- 1) & ~(kAlignment
- 1);
261 malloc_large(size_t size
)
263 LargeAllocation
* allocation
= new(std::nothrow
) LargeAllocation
;
264 if (allocation
== NULL
) {
265 dprintf("malloc_large(): Out of memory!\n");
269 if (allocation
->Allocate(size
) != B_OK
) {
270 dprintf("malloc_large(): Out of memory!\n");
275 sLargeAllocations
.InsertUnchecked(allocation
);
276 TRACE("malloc_large(%lu) -> %p\n", size
, allocation
->Address());
277 return allocation
->Address();
282 free_large(void* address
)
284 LargeAllocation
* allocation
= sLargeAllocations
.Lookup(address
);
285 if (allocation
== NULL
)
286 panic("free_large(%p): unknown allocation!\n", address
);
288 sLargeAllocations
.RemoveUnchecked(allocation
);
295 FreeChunk::SetTo(size_t size
)
302 /*! Returns the amount of bytes that can be allocated
306 FreeChunk::Size() const
308 return (addr_t
)this + fSize
- (addr_t
)AllocatedAddress();
312 /*! Splits the upper half at the requested location and returns it. This chunk
313 will no longer be a valid FreeChunk object; only its fSize will be valid.
316 FreeChunk::Split(size_t splitSize
)
318 splitSize
= align(splitSize
);
320 FreeChunk
* chunk
= (FreeChunk
*)((addr_t
)AllocatedAddress() + splitSize
);
321 size_t newSize
= (addr_t
)chunk
- (addr_t
)this;
322 chunk
->fSize
= fSize
- newSize
;
331 /*! Checks if the specified chunk touches this chunk, so
332 that they could be joined.
335 FreeChunk::IsTouching(FreeChunk
* chunk
)
338 && (((uint8
*)this + fSize
== (uint8
*)chunk
)
339 || (uint8
*)chunk
+ chunk
->fSize
== (uint8
*)this);
343 /*! Joins the chunk to this chunk and returns the pointer
344 to the new chunk - which will either be one of the
346 Note, the chunks must be joinable, or else this method
347 doesn't work correctly. Use FreeChunk::IsTouching()
348 to check if this method can be applied.
351 FreeChunk::Join(FreeChunk
* chunk
)
354 chunk
->fSize
+= fSize
;
355 chunk
->fNext
= fNext
;
360 fSize
+= chunk
->fSize
;
361 fNext
= chunk
->fNext
;
368 FreeChunk::AllocatedAddress() const
370 return (void*)static_cast<const FreeChunkData
*>(this);
375 FreeChunk::SetToAllocated(void* allocated
)
377 return static_cast<FreeChunk
*>((FreeChunkData
*)allocated
);
385 heap_release(stage2_args
* args
)
387 platform_release_heap(args
, sHeapBase
);
/*!	Prints the heap usage high-water mark and current usage; a no-op unless
	DEBUG_MAX_HEAP_USAGE is defined.
*/
void
heap_print_statistics()
{
#ifdef DEBUG_MAX_HEAP_USAGE
	dprintf("maximum boot loader heap usage: %zu, currently used: %zu\n",
		sMaxHeapUsage, sMaxHeapSize - sAvailable);
#endif
}
402 heap_init(stage2_args
* args
)
406 if (platform_init_heap(args
, &base
, &top
) < B_OK
)
411 sMaxHeapSize
= (uint8
*)top
- (uint8
*)base
;
413 // declare the whole heap as one chunk, and add it
415 FreeChunk
* chunk
= (FreeChunk
*)base
;
416 chunk
->SetTo(sMaxHeapSize
);
417 sFreeChunkTree
.Insert(chunk
);
419 sAvailable
= chunk
->Size();
420 #ifdef DEBUG_MAX_HEAP_USAGE
421 sMaxHeapUsage
= sMaxHeapSize
- sAvailable
;
424 if (sLargeAllocations
.Init(64) != B_OK
)
435 FreeChunk
* chunk
= sFreeChunkTree
.FindMin();
436 while (chunk
!= NULL
) {
437 printf("\t%p: chunk size = %ld, end = %p, next = %p\n", chunk
,
438 chunk
->Size(), (uint8
*)chunk
+ chunk
->CompleteSize(),
440 chunk
= chunk
->Next();
448 return (uint32
)sAvailable
;
455 if (sHeapBase
== NULL
|| size
== 0)
458 // align the size requirement to a kAlignment bytes boundary
459 if (size
< sizeof(FreeChunkData
))
460 size
= sizeof(FreeChunkData
);
463 if (size
>= kLargeAllocationThreshold
)
464 return malloc_large(size
);
466 if (size
> sAvailable
) {
467 dprintf("malloc(): Out of memory!\n");
471 FreeChunk
* chunk
= sFreeChunkTree
.FindClosest(FreeChunkKey(size
), true,
475 // could not find a free chunk as large as needed
476 dprintf("malloc(): Out of memory!\n");
480 sFreeChunkTree
.Remove(chunk
);
481 sAvailable
-= chunk
->Size();
483 void* allocatedAddress
= chunk
->AllocatedAddress();
485 // If this chunk is bigger than the requested size and there's enough space
486 // left over for a new chunk, we split it.
487 if (chunk
->Size() >= size
+ align(sizeof(FreeChunk
))) {
488 FreeChunk
* freeChunk
= chunk
->Split(size
);
489 sFreeChunkTree
.Insert(freeChunk
);
490 sAvailable
+= freeChunk
->Size();
493 #ifdef DEBUG_MAX_HEAP_USAGE
494 sMaxHeapUsage
= std::max(sMaxHeapUsage
, sMaxHeapSize
- sAvailable
);
497 TRACE("malloc(%lu) -> %p\n", size
, allocatedAddress
);
498 return allocatedAddress
;
503 realloc(void* oldBuffer
, size_t newSize
)
506 TRACE("realloc(%p, %lu) -> NULL\n", oldBuffer
, newSize
);
512 if (oldBuffer
!= NULL
) {
513 if (oldBuffer
>= sHeapBase
&& oldBuffer
< sHeapEnd
) {
514 FreeChunk
* oldChunk
= FreeChunk::SetToAllocated(oldBuffer
);
515 oldSize
= oldChunk
->Size();
517 LargeAllocation
* allocation
= sLargeAllocations
.Lookup(oldBuffer
);
518 if (allocation
== NULL
) {
519 panic("realloc(%p, %zu): unknown allocation!\n", oldBuffer
,
523 oldSize
= allocation
->Size();
526 // Check if the old buffer still fits, and if it makes sense to keep it.
527 if (oldSize
>= newSize
528 && (oldSize
< 128 || newSize
> oldSize
/ 3)) {
529 TRACE("realloc(%p, %lu) old buffer is large enough\n",
535 void* newBuffer
= malloc(newSize
);
536 if (newBuffer
== NULL
)
539 if (oldBuffer
!= NULL
) {
540 memcpy(newBuffer
, oldBuffer
, std::min(oldSize
, newSize
));
544 TRACE("realloc(%p, %lu) -> %p\n", oldBuffer
, newSize
, newBuffer
);
550 free(void* allocated
)
552 if (allocated
== NULL
)
555 TRACE("free(%p)\n", allocated
);
557 if (allocated
< sHeapBase
|| allocated
>= sHeapEnd
) {
558 free_large(allocated
);
562 FreeChunk
* freedChunk
= FreeChunk::SetToAllocated(allocated
);
564 #ifdef DEBUG_ALLOCATIONS
565 if (freedChunk
->Size() > sMaxHeapSize
- sAvailable
) {
566 panic("freed chunk %p clobbered (%#zx)!\n", freedChunk
,
570 FreeChunk
* chunk
= sFreeChunkTree
.FindMin();
572 if (chunk
->Size() > sAvailable
|| freedChunk
== chunk
)
573 panic("invalid chunk in free list (%p (%zu)), or double free\n",
574 chunk
, chunk
->Size());
575 chunk
= chunk
->Next();
580 // try to join the new free chunk with an existing one
581 // it may be joined with up to two chunks
583 FreeChunk
* chunk
= sFreeChunkTree
.FindMin();
587 FreeChunk
* nextChunk
= chunk
->Next();
589 if (chunk
->IsTouching(freedChunk
)) {
590 sFreeChunkTree
.Remove(chunk
);
591 sAvailable
-= chunk
->Size();
593 freedChunk
= chunk
->Join(freedChunk
);
595 if (++joinCount
== 2)
602 sFreeChunkTree
.Insert(freedChunk
);
603 sAvailable
+= freedChunk
->Size();
604 #ifdef DEBUG_MAX_HEAP_USAGE
605 sMaxHeapUsage
= std::max(sMaxHeapUsage
, sMaxHeapSize
- sAvailable
);