2 Copyright (c) 2003-2006 by Juliusz Chroboczek
4 Permission is hereby granted, free of charge, to any person obtaining a copy
5 of this software and associated documentation files (the "Software"), to deal
6 in the Software without restriction, including without limitation the rights
7 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 copies of the Software, and to permit persons to whom the Software is
9 furnished to do so, subject to the following conditions:
11 The above copyright notice and this permission notice shall be included in
12 all copies or substantial portions of the Software.
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* Shorthand for one megabyte, used when sizing the chunk-memory marks. */
#define MB (1024 * 1024)
27 chunkCriticalMark
= 0,
33 CONFIG_VARIABLE(chunkLowMark
, CONFIG_INT
,
34 "Low mark for chunk memory (0 = auto).");
35 CONFIG_VARIABLE(chunkCriticalMark
, CONFIG_INT
,
36 "Critical mark for chunk memory (0 = auto).");
37 CONFIG_VARIABLE(chunkHighMark
, CONFIG_INT
,
38 "High mark for chunk memory.");
/* Round a byte count up to a whole number of chunks, modifying the
   argument in place.  The expansion is a single parenthesised
   expression (no trailing semicolon) so the macro composes safely
   with if/else and comma contexts. */
#define ROUND_CHUNKS(a) ((a) = (((a) + CHUNK_SIZE - 1) / CHUNK_SIZE) * CHUNK_SIZE)
47 if(CHUNK_SIZE
!= 1 << log2_ceil(CHUNK_SIZE
)) {
48 do_log(L_ERROR
, "CHUNK SIZE %d is not a power of two.\n", CHUNK_SIZE
);
52 ROUND_CHUNKS(chunkHighMark
);
53 ROUND_CHUNKS(chunkCriticalMark
);
54 ROUND_CHUNKS(chunkLowMark
);
56 if(chunkHighMark
< 8 * CHUNK_SIZE
) {
57 int mem
= physicalMemory();
59 chunkHighMark
= mem
/ 4;
61 chunkHighMark
= 24 * MB
;
62 chunkHighMark
= MIN(chunkHighMark
, 24 * MB
);
63 chunkHighMark
= MAX(chunkHighMark
, 8 * CHUNK_SIZE
);
66 if(chunkHighMark
< MB
/ 2)
68 "Warning: little chunk memory (%d bytes)\n", chunkHighMark
);
71 if(chunkLowMark
<= 0) q
= 1;
72 if(chunkLowMark
< 4 * CHUNK_SIZE
||
73 chunkLowMark
> chunkHighMark
- 4 * CHUNK_SIZE
) {
74 chunkLowMark
= MIN(chunkHighMark
- 4 * CHUNK_SIZE
,
75 chunkHighMark
* 3 / 4);
76 ROUND_CHUNKS(chunkLowMark
);
77 if(!q
) do_log(L_WARN
, "Inconsistent chunkLowMark -- setting to %d.\n",
82 if(chunkCriticalMark
<= 0) q
= 1;
83 if(chunkCriticalMark
>= chunkHighMark
- 2 * CHUNK_SIZE
||
84 chunkCriticalMark
<= chunkLowMark
+ 2 * CHUNK_SIZE
) {
86 MIN(chunkHighMark
- 2 * CHUNK_SIZE
,
87 chunkLowMark
+ (chunkHighMark
- chunkLowMark
) * 15 / 16);
88 ROUND_CHUNKS(chunkCriticalMark
);
89 if(!q
) do_log(L_WARN
, "Inconsistent chunkCriticalMark -- "
90 "setting to %d.\n", chunkCriticalMark
);
99 maybe_free_chunks(int arenas
, int force
)
101 if(force
|| used_chunks
>= CHUNKS(chunkHighMark
)) {
102 discardObjects(force
, force
);
108 if(used_chunks
>= CHUNKS(chunkLowMark
) && !objectExpiryScheduled
) {
109 TimeEventHandlerPtr event
;
110 event
= scheduleTimeEvent(1, discardObjectsHandler
, 0, NULL
);
112 objectExpiryScheduled
= 1;
123 do_log(L_WARN
, "Warning: using malloc(3) for chunk allocation.\n");
139 if(used_chunks
> CHUNKS(chunkHighMark
))
140 maybe_free_chunks(0, 0);
141 if(used_chunks
> CHUNKS(chunkHighMark
))
143 chunk
= malloc(CHUNK_SIZE
);
145 maybe_free_chunks(1, 1);
146 chunk
= malloc(CHUNK_SIZE
);
158 if(used_chunks
> CHUNKS(chunkHighMark
))
160 chunk
= malloc(CHUNK_SIZE
);
167 dispose_chunk(void *chunk
)
169 assert(chunk
!= NULL
);
181 totalChunkArenaSize()
183 return used_chunks
* CHUNK_SIZE
;
/* Win32 compatibility shims: memory is obtained through VirtualAlloc,
   which returns NULL on failure, and there is no getpagesize() —
   64kB presumably matches VirtualAlloc's allocation granularity
   (NOTE(review): confirm on target platforms). */
#define MAP_FAILED NULL
#define getpagesize() (64 * 1024)
191 alloc_arena(size_t size
)
193 return VirtualAlloc(NULL
, size
, MEM_COMMIT
| MEM_RESERVE
, PAGE_READWRITE
);
196 free_arena(void *addr
, size_t size
)
199 rc
= VirtualFree(addr
, size
, MEM_RELEASE
);
/* Fallback definition of mmap's failure sentinel for systems whose
   headers do not provide MAP_FAILED. */
#define MAP_FAILED ((void*)((long int)-1))
209 alloc_arena(size_t size
)
211 return mmap(NULL
, size
, PROT_READ
| PROT_WRITE
,
212 MAP_PRIVATE
| MAP_ANONYMOUS
, -1, 0);
215 free_arena(void *addr
, size_t size
)
217 return munmap(addr
, size
);
/* Memory is organised into a number of arenas of ARENA_CHUNKS chunks
   each.  Every arena is pointed at by a struct _ChunkArena. */
/* If currentArena is not NULL, it points at the last arena used,
   which gives very fast dispose/get sequences. */
226 #define DEFINE_FFS(type, ffs_name) \
231 if(i == 0) return 0; \
233 while((i & 1) == 0) { \
240 #ifndef LONG_LONG_ARENA_BITMAPS
241 #ifndef LONG_ARENA_BITMAPS
245 typedef unsigned int ChunkBitmap
;
246 #define BITMAP_FFS(bitmap) (ffs(bitmap))
251 DEFINE_FFS(long, ffsl
)
253 typedef unsigned long ChunkBitmap
;
254 #define BITMAP_FFS(bitmap) (ffsl(bitmap))
260 DEFINE_FFS(long long, ffsll
)
262 typedef unsigned long long ChunkBitmap
;
263 #define BITMAP_FFS(bitmap) (ffsll(bitmap))
/* One bitmap bit per chunk: an arena holds ARENA_CHUNKS chunks. */
#define ARENA_CHUNKS ((int)sizeof(ChunkBitmap) * 8)
/* All bits set: a set bit marks a free chunk, so this is a fully
   free arena. */
#define EMPTY_BITMAP (~(ChunkBitmap)0)
/* The bit corresponding to chunk i within an arena's bitmap. */
#define BITMAP_BIT(i) (((ChunkBitmap)1) << (i))
271 typedef struct _ChunkArena
{
274 } ChunkArenaRec
, *ChunkArenaPtr
;
276 static ChunkArenaPtr chunkArenas
, currentArena
;
277 static int numArenas
;
/* True if chunk lies within arena's mapped block of ARENA_CHUNKS
   chunks.  An arena whose chunks pointer is NULL is currently
   unmapped and matches nothing. */
#define CHUNK_IN_ARENA(chunk, arena) \
    ((arena)->chunks && \
     (char*)(chunk) >= (arena)->chunks && \
     (char*)(chunk) < (arena)->chunks + (ARENA_CHUNKS * CHUNK_SIZE))

/* Index of chunk within arena; caller must first check
   CHUNK_IN_ARENA. */
#define CHUNK_ARENA_INDEX(chunk, arena) \
    (((char*)(chunk) - (arena)->chunks) / CHUNK_SIZE)
292 pagesize
= getpagesize();
293 if((CHUNK_SIZE
* ARENA_CHUNKS
) % pagesize
!= 0) {
295 "The arena size %d (%d x %d) "
296 "is not a multiple of the page size %d.\n",
297 ARENA_CHUNKS
* CHUNK_SIZE
, ARENA_CHUNKS
, CHUNK_SIZE
, pagesize
);
301 (CHUNKS(chunkHighMark
) + (ARENA_CHUNKS
- 1)) / ARENA_CHUNKS
;
302 chunkArenas
= malloc(numArenas
* sizeof(ChunkArenaRec
));
303 if(chunkArenas
== NULL
) {
304 do_log(L_ERROR
, "Couldn't allocate chunk arenas.\n");
307 for(i
= 0; i
< numArenas
; i
++) {
308 chunkArenas
[i
].bitmap
= EMPTY_BITMAP
;
309 chunkArenas
[i
].chunks
= NULL
;
317 ChunkArenaPtr arena
= NULL
;
320 for(i
= 0; i
< numArenas
; i
++) {
321 arena
= &(chunkArenas
[i
]);
322 if(arena
->bitmap
!= 0)
328 assert(arena
!= NULL
);
332 p
= alloc_arena(CHUNK_SIZE
* ARENA_CHUNKS
);
333 if(p
== MAP_FAILED
) {
334 do_log_error(L_ERROR
, errno
, "Couldn't allocate chunk");
335 maybe_free_chunks(1, 1);
347 ChunkArenaPtr arena
= NULL
;
349 if(currentArena
&& currentArena
->bitmap
!= 0) {
350 arena
= currentArena
;
352 if(used_chunks
>= CHUNKS(chunkHighMark
))
353 maybe_free_chunks(0, 0);
355 if(used_chunks
>= CHUNKS(chunkHighMark
))
361 currentArena
= arena
;
363 i
= BITMAP_FFS(arena
->bitmap
) - 1;
364 arena
->bitmap
&= ~BITMAP_BIT(i
);
366 return arena
->chunks
+ CHUNK_SIZE
* i
;
373 ChunkArenaPtr arena
= NULL
;
375 if(currentArena
&& currentArena
->bitmap
!= 0) {
376 arena
= currentArena
;
378 if(used_chunks
>= CHUNKS(chunkHighMark
))
384 currentArena
= arena
;
386 i
= ffs(arena
->bitmap
) - 1;
387 arena
->bitmap
&= ~BITMAP_BIT(i
);
389 return arena
->chunks
+ CHUNK_SIZE
* i
;
393 dispose_chunk(void *chunk
)
395 ChunkArenaPtr arena
= NULL
;
398 assert(chunk
!= NULL
);
400 if(currentArena
&& CHUNK_IN_ARENA(chunk
, currentArena
)) {
401 arena
= currentArena
;
403 for(i
= 0; i
< numArenas
; i
++) {
404 arena
= &(chunkArenas
[i
]);
405 if(CHUNK_IN_ARENA(chunk
, arena
))
408 assert(arena
!= NULL
);
409 currentArena
= arena
;
412 i
= CHUNK_ARENA_INDEX(chunk
, arena
);
413 arena
->bitmap
|= BITMAP_BIT(i
);
423 for(i
= 0; i
< numArenas
; i
++) {
424 arena
= &(chunkArenas
[i
]);
425 if(arena
->bitmap
== EMPTY_BITMAP
&& arena
->chunks
) {
426 rc
= free_arena(arena
->chunks
, CHUNK_SIZE
* ARENA_CHUNKS
);
428 do_log_error(L_ERROR
, errno
, "Couldn't unmap memory");
431 arena
->chunks
= NULL
;
434 if(currentArena
&& currentArena
->chunks
== NULL
)
439 totalChunkArenaSize()
444 for(i
= 0; i
< numArenas
; i
++) {
445 arena
= &(chunkArenas
[i
]);
447 size
+= (CHUNK_SIZE
* ARENA_CHUNKS
);