/*
 * Copyright 2002-2006, Axel Dörfler, axeld@pinc-software.de.
 * Distributed under the terms of the MIT License.
 *
 * Copyright 2001-2002, Travis Geiselbrecht. All rights reserved.
 * Distributed under the terms of the NewOS License.
 */
#ifndef _KERNEL_MEMHEAP_H
#define _KERNEL_MEMHEAP_H


#include <OS.h>

#include "kernel_debug_config.h"
#if USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE

// This requires a lot of up-front memory to boot at all...
#define INITIAL_HEAP_SIZE			128 * 1024 * 1024
// ... and a lot of reserves to keep running.
#define HEAP_GROW_SIZE				128 * 1024 * 1024

#else // USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE

// allocate 16MB initial heap for the kernel
#define INITIAL_HEAP_SIZE			16 * 1024 * 1024
// grow by another 4MB each time the heap runs out of memory
#define HEAP_GROW_SIZE				4 * 1024 * 1024
// allocate a dedicated 1MB area for dynamic growing
#define HEAP_DEDICATED_GROW_SIZE	1 * 1024 * 1024
// use areas for allocations bigger than 1MB
#define HEAP_AREA_USE_THRESHOLD	1 * 1024 * 1024

#endif // !(USE_GUARDED_HEAP_FOR_MALLOC && USE_GUARDED_HEAP_FOR_OBJECT_CACHE)
// allocation/deallocation flags for {malloc,free}_etc()
#define HEAP_DONT_WAIT_FOR_MEMORY		0x01
#define HEAP_DONT_LOCK_KERNEL_SPACE		0x02
#define HEAP_PRIORITY_VIP				0x04
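
// Illustrative sketch (not part of the original header): how these flags are
// intended to be passed to the *_etc() allocation functions declared below.
// The size, the flag combination, and the error handling are made up for this
// example; only the function names and flag constants come from this header.
//
//	void* buffer = malloc_etc(512,
//		HEAP_DONT_WAIT_FOR_MEMORY | HEAP_DONT_LOCK_KERNEL_SPACE);
//	if (buffer == NULL) {
//		// allocation failed instead of waiting for memory to become free
//	} else
//		free_etc(buffer, 0);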
typedef struct heap_class_s {
	uint32	initial_percentage;
	size_t	max_allocation_size;
	uint32	min_count_per_page;
	size_t	max_waste_per_page;
} heap_class;

typedef struct heap_allocator_s heap_allocator;
void* memalign_etc(size_t alignment, size_t size, uint32 flags);
void free_etc(void* address, uint32 flags);

void* memalign(size_t alignment, size_t size);

void deferred_free(void* block);

void* malloc_referenced(size_t size);
void* malloc_referenced_acquire(void* data);
void malloc_referenced_release(void* data);
void heap_add_area(heap_allocator* heap, area_id areaID, addr_t base,
	size_t size);
heap_allocator* heap_create_allocator(const char* name, addr_t base,
	size_t size, const heap_class* heapClass, bool allocateOnHeap);
void* heap_memalign(heap_allocator* heap, size_t alignment, size_t size);
status_t heap_free(heap_allocator* heap, void* address);
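
// Illustrative sketch (not part of the original header): rough usage of the
// per-allocator API above, based only on the declarations in this file. The
// backing area, the base/size values, and the heap_class setup are
// hypothetical placeholders.
//
//	heap_class myClass = { ... };
//		// fill in class parameters appropriate for the expected allocations
//	heap_allocator* heap = heap_create_allocator("my heap", base, size,
//		&myClass, false);
//	void* block = heap_memalign(heap, 16, 256);
//	if (block != NULL)
//		heap_free(heap, block);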
#if KERNEL_HEAP_LEAK_CHECK
void heap_set_get_caller(heap_allocator* heap, addr_t (*getCaller)());
#endif
status_t heap_init(addr_t heapBase, size_t heapSize);
status_t heap_init_post_area();
status_t heap_init_post_sem();
status_t heap_init_post_thread();
static inline void*
malloc_etc(size_t size, uint32 flags)
{
	return memalign_etc(0, size, flags);
}
#ifdef __cplusplus

#include <util/SinglyLinkedList.h>
struct malloc_flags {
	uint32	flags;

	malloc_flags(uint32 flags)
		: flags(flags) {}

	malloc_flags(const malloc_flags& other)
		: flags(other.flags) {}
};
inline void*
operator new(size_t size, const malloc_flags& flags) throw()
{
	return malloc_etc(size, flags.flags);
}
inline void*
operator new[](size_t size, const malloc_flags& flags) throw()
{
	return malloc_etc(size, flags.flags);
}
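
// Illustrative sketch (not part of the original header): the malloc_flags
// overloads of operator new above let C++ allocations forward the HEAP_*
// flags to malloc_etc(). Because the operators are declared throw(), the new
// expression yields NULL on failure instead of throwing. "MyObject" is a
// hypothetical type used only for this example.
//
//	MyObject* object = new(malloc_flags(HEAP_DONT_WAIT_FOR_MEMORY)) MyObject;
//	if (object == NULL) {
//		// allocation failed without waiting for memory
//	}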
class DeferredDeletable : public SinglyLinkedListLinkImpl<DeferredDeletable> {
public:
	virtual ~DeferredDeletable();
};
void deferred_delete(DeferredDeletable* deletable);
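
// Illustrative sketch (not part of the original header): a class whose
// instances must not be deleted directly in the current context can derive
// from DeferredDeletable and be handed to deferred_delete(), which presumably
// disposes of the object later. "MyHook" is a hypothetical class used only
// for this example.
//
//	class MyHook : public DeferredDeletable {
//	public:
//		virtual ~MyHook() {}
//	};
//
//	MyHook* hook = new MyHook;
//	...
//	deferred_delete(hook);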
#endif	/* __cplusplus */


#endif	/* _KERNEL_MEMHEAP_H */