/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */

/* Headers assumed from the surrounding tree (not shown in this excerpt):
   dprintf() from dprintf.h, min() from minmax.h, and the arena types and
   ARENA_* accessor macros from the local malloc.h. */
#include <string.h>
#include <errno.h>
#include <dprintf.h>
#include <minmax.h>

#include "malloc.h"
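/*
 * Block layout, as inferred from the accessors used below: every block
 * starts with a struct arena_header whose attrs field packs the block
 * type, owning heap, and size (the ARENA_TYPE_*, ARENA_HEAP_*, and
 * ARENA_SIZE_* get/set macros), plus a tag and a.next/a.prev links on
 * the all-block chain.  A free block extends this to struct
 * free_arena_header with next_free/prev_free links on the per-heap
 * circular free list rooted at __core_malloc_head[].
 */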
static void *__malloc_from_block(struct free_arena_header *fp,
                                 size_t size, malloc_tag_t tag)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;
    unsigned int heap = ARENA_HEAP_GET(fp->a.attrs);

    fsize = ARENA_SIZE_GET(fp->a.attrs);

    /* We need the 2* to account for the larger requirements of a free block */
    if ( fsize >= size + 2 * sizeof(struct arena_header) ) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
        ARENA_HEAP_SET(nfp->a.attrs, heap);
        ARENA_SIZE_SET(nfp->a.attrs, fsize - size);
        nfp->a.tag = MALLOC_FREE;
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        ARENA_SIZE_SET(fp->a.attrs, size);
        fp->a.tag = tag;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        fp->a.tag = tag;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

    /* User data starts immediately after the arena header */
    return (void *)(&fp->a + 1);
}
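/*
 * _malloc() walks the selected heap's free list in order and takes the
 * first block that is large enough (first-fit); any splitting of that
 * block is handled by __malloc_from_block() above.
 */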
static void *_malloc(size_t size, enum heap heap, malloc_tag_t tag)
{
    struct free_arena_header *fp;
    struct free_arena_header *head = &__core_malloc_head[heap];
    void *p = NULL;

    dprintf("_malloc(%zu, %u, %u) @ %p = ",
            size, heap, tag, __builtin_return_address(0));

    /* Add the obligatory arena header, and round up */
    size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

    for ( fp = head->next_free ; fp != head ; fp = fp->next_free ) {
        if ( ARENA_SIZE_GET(fp->a.attrs) >= size ) {
            /* Found fit -- allocate out of this block */
            p = __malloc_from_block(fp, size, tag);
            break;
        }
    }

    dprintf("%p\n", p);
    return p;
}
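/*
 * Worked example of the rounding in _malloc(), with hypothetical
 * values: if sizeof(struct arena_header) == 16 and ARENA_SIZE_MASK is
 * ~15, a 20-byte request becomes (20 + 32 - 1) & ~15 == 48, i.e. one
 * 16-byte header plus the payload, rounded up to the arena
 * granularity.  The wrappers below differ only in the heap and
 * allocation tag they pass to _malloc().
 */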
__export void *malloc(size_t size)
{
    return _malloc(size, HEAP_MAIN, MALLOC_CORE);
}
__export void *lmalloc(size_t size)
{
    void *p;

    p = _malloc(size, HEAP_LOWMEM, MALLOC_CORE);
    if (!p)
        errno = ENOMEM;
    return p;
}
void *pmapi_lmalloc(size_t size)
{
    return _malloc(size, HEAP_LOWMEM, MALLOC_MODULE);
}
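/*
 * realloc() tries three strategies in order: keep the block when the
 * new size is close enough to the old one, grow in place by merging
 * the physically following block when it is free and large enough,
 * and otherwise fall back to malloc() + memcpy() + free().
 */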
__export void *realloc(void *ptr, size_t size)
{
    struct free_arena_header *ah, *nah;
    struct free_arena_header *head;
    void *newptr;
    size_t newsize, oldsize, xsize;

    /* Standard realloc() edge cases */
    if (!ptr)
        return malloc(size);
    if (size == 0) {
        free(ptr);
        return NULL;
    }

    /* Step back from the user pointer to the enclosing arena header */
    ah = (struct free_arena_header *)
        ((struct arena_header *)ptr - 1);

    head = &__core_malloc_head[ARENA_HEAP_GET(ah->a.attrs)];

    /* Actual size of the old block */
    oldsize = ARENA_SIZE_GET(ah->a.attrs);

    /* Add the obligatory arena header, and round up */
    newsize = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

    if (oldsize >= newsize && newsize >= (oldsize >> 2) &&
        oldsize - newsize < 4096) {
        /* This allocation is close enough already. */
        return ptr;
    } else {
        xsize = oldsize;

        /* If the physically following block is free and big enough,
           merge it into this one */
        nah = ah->a.next;
        if ((char *)nah == (char *)ah + ARENA_SIZE_GET(ah->a.attrs) &&
            ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
            ARENA_SIZE_GET(nah->a.attrs) + oldsize >= newsize) {
            /* Merge in subsequent free block */
            ah->a.next = nah->a.next;
            ah->a.next->a.prev = ah;
            nah->next_free->prev_free = nah->prev_free;
            nah->prev_free->next_free = nah->next_free;
            ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
                           ARENA_SIZE_GET(nah->a.attrs));
            xsize = ARENA_SIZE_GET(ah->a.attrs);
        }

        if (xsize >= newsize) {
            /* We can reallocate in place */
            if (xsize >= newsize + 2 * sizeof(struct arena_header)) {
                /* Residual free block at end */
                nah = (struct free_arena_header *)((char *)ah + newsize);
                ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_FREE);
                ARENA_SIZE_SET(nah->a.attrs, xsize - newsize);
                ARENA_SIZE_SET(ah->a.attrs, newsize);
                ARENA_HEAP_SET(nah->a.attrs, ARENA_HEAP_GET(ah->a.attrs));

                /* Insert into block list */
                nah->a.next = ah->a.next;
                ah->a.next = nah;
                nah->a.next->a.prev = nah;
                nah->a.prev = ah;

                /* Insert into free list */
                if (newsize > oldsize) {
                    /* Hack: this free block is in the path of a memory
                       object which has already been grown at least once.
                       As such, put it at the *end* of the free list
                       instead of the beginning, trying to save it for
                       future realloc()s of the same block. */
                    nah->prev_free = head->prev_free;
                    nah->next_free = head;
                    head->prev_free = nah;
                    nah->prev_free->next_free = nah;
                } else {
                    nah->next_free = head->next_free;
                    nah->prev_free = head;
                    head->next_free = nah;
                    nah->next_free->prev_free = nah;
                }
            }
            /* otherwise, use up the whole block */
            return ptr;
        } else {
            /* Last resort: need to allocate a new block and copy */
            oldsize -= sizeof(struct arena_header);
            newptr = malloc(size);
            if (newptr) {
                memcpy(newptr, ptr, min(size, oldsize));
                free(ptr);
            }
            return newptr;
        }
    }
}
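/*
 * zalloc() is malloc() plus zero-initialization of the returned
 * buffer, i.e. calloc() without the element-count argument.
 */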
__export void *zalloc(size_t size)
{
    void *ptr;

    ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);

    return ptr;
}