[syslinux-debian/hramrach.git] com32/lib/malloc.c
/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */

#include <stdlib.h>
#include <string.h>
#include <com32.h>
#include <syslinux/memscan.h>
#include "init.h"
#include "malloc.h"
struct free_arena_header __malloc_head = {
    {
        ARENA_TYPE_HEAD,
        0,
        &__malloc_head,
        &__malloc_head,
    },
    &__malloc_head,
    &__malloc_head
};
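/*
 * Note: __malloc_head anchors two circular doubly-linked lists.  The
 * all-block chain (a.next/a.prev) links every arena, used or free; the
 * free chain (next_free/prev_free) links only the free arenas that
 * malloc() searches.  With the initializer above, both chains start out
 * containing just this sentinel node.
 */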
/* This is extern so it can be overridden by the user application */
extern size_t __stack_size;
extern void *__mem_end;        /* Produced after argv parsing */
static inline size_t sp(void)
{
    size_t sp;
    asm volatile ("movl %%esp,%0":"=rm" (sp));
    return sp;
}
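/*
 * sp() simply reads the current ESP.  init_memory_arena() below uses it
 * as the upper bound of the initial arena, so the heap is carved out of
 * the space between the end of the loaded image (__mem_end) and the
 * stack.
 */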
#define E820_MEM_MAX 0xfff00000    /* 4 GB - 1 MB */
static int consider_memory_area(void *dummy, addr_t start,
                                addr_t len, bool valid)
{
    struct free_arena_header *fp;
    addr_t end;

    (void)dummy;

    if (valid && start < E820_MEM_MAX) {
        if (len > E820_MEM_MAX - start)
            len = E820_MEM_MAX - start;

        end = start + len;

        if (end > __com32.cs_memsize) {
            if (start <= __com32.cs_memsize) {
                start = __com32.cs_memsize;
                len = end - start;
            }

            if (len >= 2 * sizeof(struct arena_header)) {
                fp = (struct free_arena_header *)start;
                fp->a.size = len;
                __inject_free_block(fp);
            }
        }
    }

    return 0;
}
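/*
 * consider_memory_area() is the callback passed to syslinux_scan_memory().
 * For each usable range below E820_MEM_MAX it keeps only the part above
 * __com32.cs_memsize and, if that part can still hold at least two arena
 * headers, hands it to __inject_free_block() as an additional free arena.
 */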
static void __constructor init_memory_arena(void)
{
    struct free_arena_header *fp;
    size_t start, total_space;

    start = (size_t) ARENA_ALIGN_UP(__mem_end);
    total_space = sp() - start;

    if (__stack_size == 0 || __stack_size > total_space >> 1)
        __stack_size = total_space >> 1;    /* Half for the stack, half for the heap... */

    if (total_space < __stack_size + 4 * sizeof(struct arena_header))
        __stack_size = total_space - 4 * sizeof(struct arena_header);

    fp = (struct free_arena_header *)start;
    fp->a.size = total_space - __stack_size;

    __inject_free_block(fp);

    /* Scan the memory map to look for other suitable regions */
    if (!__com32.cs_memsize)
        return;            /* Old Syslinux core, can't do this... */

    syslinux_scan_memory(consider_memory_area, NULL);
}
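/*
 * Worked example (illustrative numbers only): if ARENA_ALIGN_UP(__mem_end)
 * is 0x110000, ESP is 0x190000 and __stack_size starts at 0, total_space
 * is 0x80000 (512 KiB); __stack_size becomes 256 KiB and the initial free
 * arena covers the remaining 256 KiB starting at 0x110000.
 */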
static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;

    fsize = fp->a.size;

    /* We need the 2* to account for the larger requirements of a free block */
    if (fsize >= size + 2 * sizeof(struct arena_header)) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        nfp->a.type = ARENA_TYPE_FREE;
        nfp->a.size = fsize - size;
        fp->a.type = ARENA_TYPE_USED;
        fp->a.size = size;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        fp->a.type = ARENA_TYPE_USED;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

    return (void *)(&fp->a + 1);
}
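/*
 * Split example (assuming, purely for illustration, a 16-byte struct
 * arena_header): a 256-byte free arena asked to satisfy size == 64 meets
 * 256 >= 64 + 2*16, so the first 64 bytes become the used block and a new
 * 192-byte free arena is created at offset 64, taking the old arena's
 * place on the free chain; otherwise the whole arena is handed out and
 * simply unlinked from the free chain.
 */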
void *malloc(size_t size)
{
    struct free_arena_header *fp;

    if (size == 0)
        return NULL;

    /* Add the obligatory arena header, and round up */
    size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

    for (fp = __malloc_head.next_free; fp->a.type != ARENA_TYPE_HEAD;
         fp = fp->next_free) {
        if (fp->a.size >= size) {
            /* Found fit -- allocate out of this block */
            return __malloc_from_block(fp, size);
        }
    }

    /* Nothing found... need to request a block from the kernel */
    return NULL;        /* No kernel to get stuff from */
}
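/*
 * This is a plain first-fit search over the free chain: the loop stops at
 * the ARENA_TYPE_HEAD sentinel, and if no arena is large enough malloc()
 * returns NULL -- running as a COM32 module there is no kernel to ask for
 * more memory, so callers must check the return value.
 */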