com32/lib/malloc.c
/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */
#include <stdlib.h>
#include "init.h"
#include "malloc.h"
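
/*
 * Arena layout assumed throughout this file (the real definitions live
 * in malloc.h): struct arena_header carries a type tag, the total size
 * of the arena (header included), and next/prev links threading every
 * arena together; struct free_arena_header embeds it as its member 'a'
 * and adds next_free/prev_free links used only while the arena is free.
 */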
struct free_arena_header __malloc_head =
{
  {
    ARENA_TYPE_HEAD,
    0,
    &__malloc_head,
    &__malloc_head,
  },
  &__malloc_head,
  &__malloc_head
};
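
/*
 * The head node is a sentinel of type ARENA_TYPE_HEAD; both the
 * all-arena chain and the free chain are circular doubly-linked lists
 * threaded through it.  The initializer above makes both lists empty
 * by pointing the head back at itself.
 */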

/* This is extern so it can be overridden by the user application */
extern size_t __stack_size;
extern void *__mem_end;		/* Produced after argv parsing */

/* Return the current stack pointer (i386 %esp) */
static inline size_t sp(void)
{
  size_t sp;
  asm volatile("movl %%esp,%0" : "=rm" (sp));
  return sp;
}

static void __constructor init_memory_arena(void)
{
  struct free_arena_header *fp;
  size_t start, total_space;

  start = (size_t)ARENA_ALIGN_UP(__mem_end);
  total_space = sp() - start;

  if ( __stack_size == 0 || __stack_size > total_space >> 1 )
    __stack_size = total_space >> 1;	/* Half for the stack, half for the heap... */

  if ( total_space < __stack_size + 4*sizeof(struct arena_header) )
    __stack_size = total_space - 4*sizeof(struct arena_header);

  fp = (struct free_arena_header *)start;
  fp->a.type = ARENA_TYPE_FREE;
  fp->a.size = total_space - __stack_size;

  /* Insert into chains */
  fp->a.next = fp->a.prev = &__malloc_head;
  fp->next_free = fp->prev_free = &__malloc_head;
  __malloc_head.a.next = __malloc_head.a.prev = fp;
  __malloc_head.next_free = __malloc_head.prev_free = fp;
}
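
/*
 * Memory picture after initialization (addresses increase rightward):
 *
 *   start = ARENA_ALIGN_UP(__mem_end)                sp()
 *     |<----------------- total_space ----------------->|
 *     [ single free arena (the heap)  ][ __stack_size   ]
 *
 * Everything between the end of the loaded image/arguments and the
 * stack becomes one big free arena; the top __stack_size bytes are
 * left to the stack.
 */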

static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
{
  size_t fsize;
  struct free_arena_header *nfp, *na;

  fsize = fp->a.size;

  /* We need the 2* to account for the larger requirements of a free block */
  if ( fsize >= size+2*sizeof(struct arena_header) ) {
    /* Bigger block than required -- split block */
    nfp = (struct free_arena_header *)((char *)fp + size);
    na = fp->a.next;

    nfp->a.type = ARENA_TYPE_FREE;
    nfp->a.size = fsize-size;
    fp->a.type = ARENA_TYPE_USED;
    fp->a.size = size;

    /* Insert into all-block chain */
    nfp->a.prev = fp;
    nfp->a.next = na;
    na->a.prev = nfp;
    fp->a.next = nfp;

    /* Replace current block on free chain */
    nfp->next_free = fp->next_free;
    nfp->prev_free = fp->prev_free;
    fp->next_free->prev_free = nfp;
    fp->prev_free->next_free = nfp;
  } else {
    /* Allocate the whole block */
    fp->a.type = ARENA_TYPE_USED;

    /* Remove from free chain */
    fp->next_free->prev_free = fp->prev_free;
    fp->prev_free->next_free = fp->next_free;
  }

  return (void *)(&fp->a + 1);
}
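
/*
 * The split threshold above is size + 2*sizeof(struct arena_header)
 * rather than just size: the leftover piece must be able to stand on
 * its own as a free arena, i.e. hold its own header plus the extra
 * free-chain links of a free_arena_header.  A smaller remainder is
 * simply handed out with the allocation as internal slack.
 */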

void *malloc(size_t size)
{
  struct free_arena_header *fp;

  if ( size == 0 )
    return NULL;

  /* Add the obligatory arena header, and round up */
  size = (size+2*sizeof(struct arena_header)-1) & ARENA_SIZE_MASK;

  for ( fp = __malloc_head.next_free ; fp->a.type != ARENA_TYPE_HEAD ;
	fp = fp->next_free ) {
    if ( fp->a.size >= size ) {
      /* Found fit -- allocate out of this block */
      return __malloc_from_block(fp, size);
    }
  }

  /* Nothing found... need to request a block from the kernel */
  return NULL;			/* No kernel to get stuff from */
}
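
/*
 * malloc() is thus a first-fit allocator over the free chain.  A worked
 * example, assuming a 16-byte struct arena_header and therefore
 * ARENA_SIZE_MASK == ~15 (the actual values come from malloc.h):
 * malloc(100) rounds the request up to (100 + 2*16 - 1) & ~15 = 128
 * bytes; given a 512-byte free arena, 512 >= 128 + 32 holds, so
 * __malloc_from_block() splits it into a 128-byte used arena (16-byte
 * header + payload) and a 384-byte free arena, returning a pointer
 * just past the header.
 */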