/*
 * Copyright 2010, Ingo Weinhold <ingo_weinhold@gmx.de>.
 * Distributed under the terms of the MIT License.
 */
5 #ifndef MEMORY_MANAGER_H
6 #define MEMORY_MANAGER_H
9 #include <KernelExport.h>
11 #include <condition_variable.h>
14 #include <util/DoublyLinkedList.h>
15 #include <util/OpenHashTable.h>
17 #include "slab_debug.h"
18 #include "slab_private.h"
// Forward declaration; the full definition is not needed in this header
// (used below only as a pointer parameter type for _AddTrackingInfo()).
21 class AbstractTraceEntryWithStackTrace
;
// Chunk granularities managed by the MemoryManager. Medium and large chunks
// are fixed multiples of the small (single-page) chunk size.
#define SLAB_CHUNK_SIZE_SMALL	B_PAGE_SIZE
#define SLAB_CHUNK_SIZE_MEDIUM	(16 * B_PAGE_SIZE)
#define SLAB_CHUNK_SIZE_LARGE	(128 * B_PAGE_SIZE)
#define SLAB_AREA_SIZE			(2048 * B_PAGE_SIZE)
	// TODO: These sizes have been chosen with 4 KB pages in mind.
#define SLAB_AREA_STRUCT_OFFSET	B_PAGE_SIZE
	// The offset from the start of the area to the Area structure. This
	// space is not mapped, so code that writes past the end of the previous
	// area faults instead of corrupting the Area structure.

// Derived counts: how many large (meta) chunks fit in one area, and how many
// small chunks fit in one meta chunk.
#define SLAB_META_CHUNKS_PER_AREA	(SLAB_AREA_SIZE / SLAB_CHUNK_SIZE_LARGE)
#define SLAB_SMALL_CHUNKS_PER_META_CHUNK	\
	(SLAB_CHUNK_SIZE_LARGE / SLAB_CHUNK_SIZE_SMALL)
// Public static API of the slab MemoryManager.
// NOTE(review): the enclosing "class MemoryManager {" line and several
// parameter-list continuation lines fall on lines missing from this chunk of
// the file; trailing commas below mark cut-off signatures — confirm the full
// declarations against the complete header.
44 static void Init(kernel_args
* args
);
// Second-stage initialization; presumably run once kernel areas exist —
// confirm against the implementation.
45 static void InitPostArea();
// Chunk allocation/free on behalf of an ObjectCache (signature truncated).
47 static status_t
Allocate(ObjectCache
* cache
, uint32 flags
,
49 static void Free(void* pages
, uint32 flags
);
// Raw (cache-less) allocation; FreeRawOrReturnCache() returns the owning
// ObjectCache instead of freeing when the address belongs to a cache.
51 static status_t
AllocateRaw(size_t size
, uint32 flags
,
53 static ObjectCache
* FreeRawOrReturnCache(void* pages
,
56 static size_t AcceptableChunkSize(size_t size
);
57 static ObjectCache
* GetAllocationInfo(void* address
,
59 static ObjectCache
* CacheForAddress(void* address
);
// Deferred-maintenance hooks; MaintenanceNeeded() is defined inline below.
61 static bool MaintenanceNeeded();
62 static void PerformMaintenance();
// Allocation-tracking diagnostics, compiled in only when
// SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING is enabled (see slab_debug.h).
64 #if SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING
65 static bool AnalyzeAllocationCallers(
66 AllocationTrackingCallback
& callback
);
69 static ObjectCache
* DebugObjectCacheForAddress(void* address
);
// MetaChunk: bookkeeping for one large-chunk-sized region of an Area that is
// subdivided into equal-sized chunks.
// NOTE(review): several member declarations are on lines missing from this
// chunk of the file — the inline helpers below reference chunkBase,
// chunkSize and chunkCount fields that are not visible here; confirm against
// the complete header.
83 struct MetaChunk
: DoublyLinkedListLinkImpl
<MetaChunk
> {
88 uint16 usedChunkCount
;
89 uint16 firstFreeChunk
; // *some* free range
90 uint16 lastFreeChunk
; // inclusive
91 Chunk chunks
[SLAB_SMALL_CHUNKS_PER_META_CHUNK
];
// Recovers the owning Area from this object's own address (defined inline
// at the bottom of the header).
94 Area
* GetArea() const;
97 friend struct MetaChunk
;
// Intrusive list of MetaChunks (uses the DoublyLinkedListLinkImpl base).
98 typedef DoublyLinkedList
<MetaChunk
> MetaChunkList
;
// Area: per-area management structure, embedded SLAB_AREA_STRUCT_OFFSET
// bytes into the (SLAB_AREA_SIZE-aligned) area it describes.
// NOTE(review): some member declarations fall on lines missing from this
// chunk of the file; confirm the full structure against the complete header.
100 struct Area
: DoublyLinkedListLinkImpl
<Area
> {
103 size_t reserved_memory_for_mapping
;
104 uint16 usedMetaChunkCount
;
106 MetaChunk metaChunks
[SLAB_META_CHUNKS_PER_AREA
];
// Base address of the managed area: the structure sits at a fixed offset
// past the area start, so subtract that offset from `this`.
108 addr_t
BaseAddress() const
110 return (addr_t
)this - SLAB_AREA_STRUCT_OFFSET
;
114 typedef DoublyLinkedList
<Area
> AreaList
;
// Hash-table definition for looking up Areas by address with BOpenHashTable.
// The key is the area's base address; dividing by SLAB_AREA_SIZE yields a
// dense hash since areas are SLAB_AREA_SIZE-aligned.
// NOTE(review): braces and the body of GetLink() are on lines missing from
// this chunk of the file.
116 struct AreaHashDefinition
{
117 typedef addr_t KeyType
;
118 typedef Area ValueType
;
120 size_t HashKey(addr_t key
) const
122 return key
/ SLAB_AREA_SIZE
;
125 size_t Hash(const Area
* value
) const
127 return HashKey(value
->BaseAddress());
130 bool Compare(addr_t key
, const Area
* value
) const
132 return key
== value
->BaseAddress();
// Returns the intrusive link field used to chain colliding Areas; the body
// is not visible here.
135 Area
*& GetLink(Area
* value
) const
141 typedef BOpenHashTable
<AreaHashDefinition
> AreaTable
;
// AllocationEntry: used to block a waiting allocator on a condition variable
// (see sAllocationEntryCanWait/sAllocationEntryDontWait below).
// NOTE(review): the remaining members and the closing brace are on lines
// missing from this chunk of the file.
143 struct AllocationEntry
{
144 ConditionVariable condition
;
// ---- private static helpers ----
// NOTE(review): many parameter-list continuation lines are missing from this
// chunk of the file; trailing commas mark cut-off signatures.

// Chunk allocation internals: carve chunks of `chunkSize` out of meta
// chunks, reporting the chosen MetaChunk/Chunk through reference out-params.
149 static status_t
_AllocateChunks(size_t chunkSize
,
150 uint32 chunkCount
, uint32 flags
,
151 MetaChunk
*& _metaChunk
, Chunk
*& _chunk
);
152 static bool _GetChunks(MetaChunkList
* metaChunkList
,
153 size_t chunkSize
, uint32 chunkCount
,
154 MetaChunk
*& _metaChunk
, Chunk
*& _chunk
);
155 static bool _GetChunk(MetaChunkList
* metaChunkList
,
156 size_t chunkSize
, MetaChunk
*& _metaChunk
,
158 static void _FreeChunk(Area
* area
, MetaChunk
* metaChunk
,
159 Chunk
* chunk
, addr_t chunkAddress
,
160 bool alreadyUnmapped
, uint32 flags
);
162 static void _PrepareMetaChunk(MetaChunk
* metaChunk
,
// Free-area stack handling (inline definitions at the bottom of the header).
165 static void _PushFreeArea(Area
* area
);
166 static Area
* _PopFreeArea();
// Area lifecycle: registration, allocation and teardown.
168 static void _AddArea(Area
* area
);
169 static status_t
_AllocateArea(uint32 flags
, Area
*& _area
);
170 static void _FreeArea(Area
* area
, bool areaRemoved
,
// Mapping/unmapping of chunk ranges within a VMArea.
173 static status_t
_MapChunk(VMArea
* vmArea
, addr_t address
,
174 size_t size
, size_t reserveAdditionalMemory
,
176 static status_t
_UnmapChunk(VMArea
* vmArea
, addr_t address
,
177 size_t size
, uint32 flags
);
// Early-boot support: converts areas created before the VM was fully up.
179 static void _UnmapFreeChunksEarly(Area
* area
);
180 static void _ConvertEarlyArea(Area
* area
);
182 static void _RequestMaintenance();
// Address arithmetic helpers (defined inline below).
184 static addr_t
_AreaBaseAddressForAddress(addr_t address
);
185 static Area
* _AreaForAddress(addr_t address
);
186 static uint32
_ChunkIndexForAddress(
187 const MetaChunk
* metaChunk
, addr_t address
);
188 static addr_t
_ChunkAddress(const MetaChunk
* metaChunk
,
190 static bool _IsChunkFree(const MetaChunk
* metaChunk
,
192 static bool _IsChunkInFreeList(const MetaChunk
* metaChunk
,
194 static void _CheckMetaChunk(MetaChunk
* metaChunk
);
// Kernel-debugger dump commands (argc/argv style) and their print helpers.
196 static int _DumpRawAllocations(int argc
, char** argv
);
197 static void _PrintMetaChunkTableHeader(bool printChunks
);
198 static void _DumpMetaChunk(MetaChunk
* metaChunk
,
199 bool printChunks
, bool printHeader
);
200 static int _DumpMetaChunk(int argc
, char** argv
);
201 static void _DumpMetaChunks(const char* name
,
202 MetaChunkList
& metaChunkList
,
204 static int _DumpMetaChunks(int argc
, char** argv
);
205 static int _DumpArea(int argc
, char** argv
);
206 static int _DumpAreas(int argc
, char** argv
);
// Allocation-tracking helpers, compiled in only with
// SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING (see slab_debug.h).
208 #if SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING
209 static void _AddTrackingInfo(void* allocation
, size_t size
,
210 AbstractTraceEntryWithStackTrace
* entry
);
211 static AllocationTrackingInfo
* _TrackingInfoFor(void* allocation
,
// ---- static data members ----
// Size of the per-area administrative region: the Area structure rounded up
// to whole pages.
216 static const size_t kAreaAdminSize
217 = ROUNDUP(sizeof(Area
), B_PAGE_SIZE
);
// NOTE(review): at least one member declaration (original line ~218/219) is
// missing from this chunk of the file — presumably the main lock; confirm
// against the complete header.
220 static rw_lock sAreaTableLock
;
221 static kernel_args
* sKernelArgs
;
// Hash table of all Areas, plus a stack of entirely free Areas with its
// element count (see _PushFreeArea()/_PopFreeArea()).
222 static AreaTable sAreaTable
;
223 static Area
* sFreeAreas
;
224 static int sFreeAreaCount
;
// MetaChunk lists partitioned by state/chunk size.
225 static MetaChunkList sFreeCompleteMetaChunks
;
226 static MetaChunkList sFreeShortMetaChunks
;
227 static MetaChunkList sPartialMetaChunksSmall
;
228 static MetaChunkList sPartialMetaChunksMedium
;
// Waiting allocators, split by whether they may block.
229 static AllocationEntry
* sAllocationEntryCanWait
;
230 static AllocationEntry
* sAllocationEntryDontWait
;
// Flag read by MaintenanceNeeded() and serviced by PerformMaintenance().
231 static bool sMaintenanceNeeded
;
235 /*static*/ inline bool
236 MemoryManager::MaintenanceNeeded()
238 return sMaintenanceNeeded
;
// Pushes `area` onto the sFreeAreas stack via the _push() helper.
// NOTE(review): lines are missing here — _PopFreeArea() consults
// sFreeAreaCount, so an increment of that counter presumably belongs in this
// function; confirm against the complete file.
242 /*static*/ inline void
243 MemoryManager::_PushFreeArea(Area
* area
)
245 _push(sFreeAreas
, area
);
// Pops one Area off the sFreeAreas stack via the _pop() helper.
// NOTE(review): lines are missing here — the sFreeAreaCount == 0 guard
// implies an early return (and presumably a counter decrement) on lines not
// visible in this chunk of the file; confirm against the complete file.
250 /*static*/ inline MemoryManager::Area
*
251 MemoryManager::_PopFreeArea()
253 if (sFreeAreaCount
== 0)
257 return _pop(sFreeAreas
);
261 /*static*/ inline addr_t
262 MemoryManager::_AreaBaseAddressForAddress(addr_t address
)
264 return ROUNDDOWN((addr_t
)address
, SLAB_AREA_SIZE
);
268 /*static*/ inline MemoryManager::Area
*
269 MemoryManager::_AreaForAddress(addr_t address
)
271 return (Area
*)(_AreaBaseAddressForAddress(address
)
272 + SLAB_AREA_STRUCT_OFFSET
);
276 /*static*/ inline uint32
277 MemoryManager::_ChunkIndexForAddress(const MetaChunk
* metaChunk
, addr_t address
)
279 return (address
- metaChunk
->chunkBase
) / metaChunk
->chunkSize
;
283 /*static*/ inline addr_t
284 MemoryManager::_ChunkAddress(const MetaChunk
* metaChunk
, const Chunk
* chunk
)
286 return metaChunk
->chunkBase
287 + (chunk
- metaChunk
->chunks
) * metaChunk
->chunkSize
;
291 /*static*/ inline bool
292 MemoryManager::_IsChunkFree(const MetaChunk
* metaChunk
, const Chunk
* chunk
)
294 return chunk
->next
== NULL
295 || (chunk
->next
>= metaChunk
->chunks
296 && chunk
->next
< metaChunk
->chunks
+ metaChunk
->chunkCount
);
300 inline MemoryManager::Area
*
301 MemoryManager::MetaChunk::GetArea() const
303 return _AreaForAddress((addr_t
)this);
#if SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING

// Returns the AllocationTrackingInfo stored at the tail of an allocation:
// the info occupies the last sizeof(AllocationTrackingInfo) bytes of the
// `size`-byte block starting at `allocation`.
/*static*/ inline AllocationTrackingInfo*
MemoryManager::_TrackingInfoFor(void* allocation, size_t size)
{
	uint8* end = (uint8*)allocation + size;
	return (AllocationTrackingInfo*)(end - sizeof(AllocationTrackingInfo));
}

#endif	// SLAB_MEMORY_MANAGER_ALLOCATION_TRACKING
319 #endif // MEMORY_MANAGER_H