/* sal/rtl/alloc_arena.cxx — arena (resource) allocator implementation. */
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
2 /*
3 * This file is part of the LibreOffice project.
5 * This Source Code Form is subject to the terms of the Mozilla Public
6 * License, v. 2.0. If a copy of the MPL was not distributed with this
7 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
9 * This file incorporates work covered by the following license notice:
11 * Licensed to the Apache Software Foundation (ASF) under one or more
12 * contributor license agreements. See the NOTICE file distributed
13 * with this work for additional information regarding copyright
14 * ownership. The ASF licenses this file to you under the Apache
15 * License, Version 2.0 (the "License"); you may not use this file
16 * except in compliance with the License. You may obtain a copy of
17 * the License at http://www.apache.org/licenses/LICENSE-2.0 .
20 #include <sal/config.h>
22 #include "alloc_arena.hxx"
24 #include "alloc_impl.hxx"
25 #include <rtllifecycle.h>
26 #include <sal/macros.h>
28 #include <cassert>
29 #include <string.h>
30 #include <stdio.h>
32 namespace {
34 /**
35 @internal
37 struct rtl_arena_list_st
39 rtl_memory_lock_type m_lock;
40 rtl_arena_type m_arena_head;
45 static rtl_arena_list_st g_arena_list;
47 /**
48 provided for arena_type allocations, and hash_table resizing.
50 @internal
52 static rtl_arena_type * gp_arena_arena = nullptr;
54 /**
55 Low level virtual memory (pseudo) arena
56 (platform dependent implementation)
58 @internal
60 static rtl_arena_type * gp_machdep_arena = nullptr;
62 rtl_arena_type * gp_default_arena = nullptr;
64 namespace
67 void * rtl_machdep_alloc(
68 rtl_arena_type * pArena,
69 sal_Size * pSize
72 void rtl_machdep_free(
73 rtl_arena_type * pArena,
74 void * pAddr,
75 sal_Size nSize
78 sal_Size rtl_machdep_pagesize();
80 void rtl_arena_segment_constructor(void * obj)
82 rtl_arena_segment_type * segment = static_cast<rtl_arena_segment_type*>(obj);
84 QUEUE_START_NAMED(segment, s);
85 QUEUE_START_NAMED(segment, f);
88 void rtl_arena_segment_destructor(void * obj)
90 rtl_arena_segment_type * segment = static_cast< rtl_arena_segment_type * >(
91 obj);
92 assert(QUEUE_STARTED_NAMED(segment, s));
93 assert(QUEUE_STARTED_NAMED(segment, f));
94 (void) segment; // avoid warnings
97 /**
98 @precond arena->m_lock acquired.
100 bool rtl_arena_segment_populate(rtl_arena_type * arena)
102 rtl_arena_segment_type *span;
103 sal_Size size = rtl_machdep_pagesize();
105 span = static_cast< rtl_arena_segment_type * >(
106 rtl_machdep_alloc(gp_machdep_arena, &size));
107 if (span)
109 rtl_arena_segment_type *first, *last, *head;
110 sal_Size count = size / sizeof(rtl_arena_segment_type);
112 /* insert onto reserve span list */
113 QUEUE_INSERT_TAIL_NAMED(&(arena->m_segment_reserve_span_head), span, s);
114 QUEUE_START_NAMED(span, f);
115 span->m_addr = reinterpret_cast<sal_uIntPtr>(span);
116 span->m_size = size;
117 span->m_type = RTL_ARENA_SEGMENT_TYPE_SPAN;
119 /* insert onto reserve list */
120 head = &(arena->m_segment_reserve_head);
121 for (first = span + 1, last = span + count; first < last; ++first)
123 QUEUE_INSERT_TAIL_NAMED(head, first, s);
124 QUEUE_START_NAMED(first, f);
125 first->m_addr = 0;
126 first->m_size = 0;
127 first->m_type = 0;
130 return (span != nullptr);
134 @precond arena->m_lock acquired.
135 @precond (*ppSegment == 0)
137 void rtl_arena_segment_get(
138 rtl_arena_type * arena,
139 rtl_arena_segment_type ** ppSegment
142 rtl_arena_segment_type * head;
144 assert(!*ppSegment);
146 head = &(arena->m_segment_reserve_head);
147 if (head->m_snext != head || rtl_arena_segment_populate (arena))
149 (*ppSegment) = head->m_snext;
150 QUEUE_REMOVE_NAMED(*ppSegment, s);
155 @precond arena->m_lock acquired.
156 @postcond (*ppSegment == 0)
158 void rtl_arena_segment_put(
159 rtl_arena_type * arena,
160 rtl_arena_segment_type ** ppSegment
163 rtl_arena_segment_type * head;
165 assert(QUEUE_STARTED_NAMED(*ppSegment, s));
166 assert(QUEUE_STARTED_NAMED(*ppSegment, f));
168 (*ppSegment)->m_addr = 0;
169 (*ppSegment)->m_size = 0;
171 assert((*ppSegment)->m_type != RTL_ARENA_SEGMENT_TYPE_HEAD);
172 (*ppSegment)->m_type = 0;
174 /* keep as reserve */
175 head = &(arena->m_segment_reserve_head);
176 QUEUE_INSERT_HEAD_NAMED(head, (*ppSegment), s);
178 /* clear */
179 (*ppSegment) = nullptr;
183 @precond arena->m_lock acquired.
185 void rtl_arena_freelist_insert (
186 rtl_arena_type * arena,
187 rtl_arena_segment_type * segment
190 rtl_arena_segment_type * head;
191 const auto bit = highbit(segment->m_size);
192 assert(bit > 0);
193 head = &(arena->m_freelist_head[bit - 1]);
194 QUEUE_INSERT_TAIL_NAMED(head, segment, f);
196 arena->m_freelist_bitmap |= head->m_size;
200 @precond arena->m_lock acquired.
202 void rtl_arena_freelist_remove(
203 rtl_arena_type * arena,
204 rtl_arena_segment_type * segment
207 if (segment->m_fnext->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD &&
208 segment->m_fprev->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD)
210 rtl_arena_segment_type * head;
212 head = segment->m_fprev;
213 assert(arena->m_freelist_bitmap & head->m_size);
214 arena->m_freelist_bitmap ^= head->m_size;
216 QUEUE_REMOVE_NAMED(segment, f);
/* Map a segment address to a hash-table bucket: mix the address with two
   shifted copies of itself, discard the quantum-aligned low bits, and mask
   to the (power-of-two) table size. */
#define RTL_ARENA_HASH_INDEX_IMPL(a, s, q, m) \
     ((((a) + ((a) >> (s)) + ((a) >> ((s) << 1))) >> (q)) & (m))

#define RTL_ARENA_HASH_INDEX(arena, addr) \
    RTL_ARENA_HASH_INDEX_IMPL((addr), (arena)->m_hash_shift, (arena)->m_quantum_shift, ((arena)->m_hash_size - 1))
226 @precond arena->m_lock released.
228 void rtl_arena_hash_rescale(
229 rtl_arena_type * arena,
230 sal_Size new_size
233 assert(new_size != 0);
235 rtl_arena_segment_type ** new_table;
236 sal_Size new_bytes;
238 new_bytes = new_size * sizeof(rtl_arena_segment_type*);
239 new_table = static_cast<rtl_arena_segment_type **>(rtl_arena_alloc (gp_arena_arena, &new_bytes));
241 if (new_table)
243 rtl_arena_segment_type ** old_table;
244 sal_Size old_size, i;
246 memset (new_table, 0, new_bytes);
248 RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
250 old_table = arena->m_hash_table;
251 old_size = arena->m_hash_size;
253 arena->m_hash_table = new_table;
254 arena->m_hash_size = new_size;
255 arena->m_hash_shift = highbit(arena->m_hash_size) - 1;
257 for (i = 0; i < old_size; i++)
259 rtl_arena_segment_type * curr = old_table[i];
260 while (curr)
262 rtl_arena_segment_type * next = curr->m_fnext;
263 rtl_arena_segment_type ** head;
265 // coverity[negative_shift] - bogus
266 head = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, curr->m_addr)]);
267 curr->m_fnext = (*head);
268 (*head) = curr;
270 curr = next;
272 old_table[i] = nullptr;
275 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
277 if (old_table != arena->m_hash_table_0)
279 sal_Size old_bytes = old_size * sizeof(rtl_arena_segment_type*);
280 rtl_arena_free (gp_arena_arena, old_table, old_bytes);
286 Insert arena hash, and update stats.
288 void rtl_arena_hash_insert(
289 rtl_arena_type * arena,
290 rtl_arena_segment_type * segment
293 rtl_arena_segment_type ** ppSegment;
295 ppSegment = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, segment->m_addr)]);
297 segment->m_fnext = (*ppSegment);
298 (*ppSegment) = segment;
300 arena->m_stats.m_alloc += 1;
301 arena->m_stats.m_mem_alloc += segment->m_size;
305 Remove arena hash, and update stats.
307 rtl_arena_segment_type * rtl_arena_hash_remove(
308 rtl_arena_type * arena,
309 sal_uIntPtr addr,
310 sal_Size size
313 rtl_arena_segment_type *segment, **segpp;
314 sal_Size lookups = 0;
316 segpp = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, addr)]);
317 while ((segment = *segpp))
319 if (segment->m_addr == addr)
321 *segpp = segment->m_fnext;
322 segment->m_fnext = segment->m_fprev = segment;
323 break;
326 /* update lookup miss stats */
327 lookups += 1;
328 segpp = &(segment->m_fnext);
331 assert(segment); // bad free
332 if (segment)
334 assert(segment->m_size == size);
335 (void) size; // avoid warnings
337 arena->m_stats.m_free += 1;
338 arena->m_stats.m_mem_alloc -= segment->m_size;
340 if (lookups > 1)
342 sal_Size nseg = static_cast<sal_Size>(arena->m_stats.m_alloc - arena->m_stats.m_free);
343 if (nseg > 4 * arena->m_hash_size)
345 if (!(arena->m_flags & RTL_ARENA_FLAG_RESCALE))
347 sal_Size ave = nseg >> arena->m_hash_shift;
348 assert(ave != 0);
349 sal_Size new_size = arena->m_hash_size << (highbit(ave) - 1);
351 arena->m_flags |= RTL_ARENA_FLAG_RESCALE;
352 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
353 rtl_arena_hash_rescale (arena, new_size);
354 RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
355 arena->m_flags &= ~RTL_ARENA_FLAG_RESCALE;
361 return segment;
365 allocate (and remove) segment from freelist
367 @precond arena->m_lock acquired
368 @precond (*ppSegment == 0)
370 bool rtl_arena_segment_alloc(
371 rtl_arena_type * arena,
372 sal_Size size,
373 rtl_arena_segment_type ** ppSegment
376 int index = 0;
378 assert(!*ppSegment);
379 if (!RTL_MEMORY_ISP2(size))
381 unsigned int msb = highbit(size);
382 if (RTL_ARENA_FREELIST_SIZE == msb)
384 /* highest possible freelist: fall back to first fit */
385 rtl_arena_segment_type *head, *segment;
387 head = &(arena->m_freelist_head[msb - 1]);
388 for (segment = head->m_fnext; segment != head; segment = segment->m_fnext)
390 if (segment->m_size >= size)
392 /* allocate first fit segment */
393 (*ppSegment) = segment;
394 break;
397 goto dequeue_and_leave;
400 /* roundup to next power of 2 */
401 size = ((sal_Size(1)) << msb);
404 index = lowbit(RTL_MEMORY_P2ALIGN(arena->m_freelist_bitmap, size));
405 if (index > 0)
407 /* instant fit: allocate first free segment */
408 rtl_arena_segment_type *head;
410 head = &(arena->m_freelist_head[index - 1]);
411 (*ppSegment) = head->m_fnext;
412 assert((*ppSegment) != head);
415 dequeue_and_leave:
416 if (*ppSegment)
418 /* remove from freelist */
419 rtl_arena_freelist_remove (arena, (*ppSegment));
421 return (*ppSegment != nullptr);
425 import new (span) segment from source arena
427 @precond arena->m_lock acquired
428 @precond (*ppSegment == 0)
430 bool rtl_arena_segment_create(
431 rtl_arena_type * arena,
432 sal_Size size,
433 rtl_arena_segment_type ** ppSegment
436 assert(!*ppSegment);
437 if (arena->m_source_alloc)
439 rtl_arena_segment_get (arena, ppSegment);
440 if (*ppSegment)
442 rtl_arena_segment_type * span = nullptr;
443 rtl_arena_segment_get (arena, &span);
444 if (span)
446 /* import new span from source arena */
447 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
449 span->m_size = size;
450 span->m_addr = reinterpret_cast<sal_uIntPtr>(
451 (arena->m_source_alloc)(
452 arena->m_source_arena, &(span->m_size)));
454 RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
455 if (span->m_addr != 0)
457 /* insert onto segment list, update stats */
458 span->m_type = RTL_ARENA_SEGMENT_TYPE_SPAN;
459 QUEUE_INSERT_HEAD_NAMED(&(arena->m_segment_head), span, s);
460 arena->m_stats.m_mem_total += span->m_size;
462 (*ppSegment)->m_addr = span->m_addr;
463 (*ppSegment)->m_size = span->m_size;
464 (*ppSegment)->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;
465 QUEUE_INSERT_HEAD_NAMED(span, (*ppSegment), s);
467 /* report success */
468 return true;
470 rtl_arena_segment_put (arena, &span);
472 rtl_arena_segment_put (arena, ppSegment);
475 return false; // failure
479 mark as free and join with adjacent free segment(s)
481 @precond arena->m_lock acquired
482 @precond segment marked 'used'
484 void rtl_arena_segment_coalesce(
485 rtl_arena_type * arena,
486 rtl_arena_segment_type * segment
489 rtl_arena_segment_type *next, *prev;
491 /* mark segment free */
492 assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_USED);
493 segment->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;
495 /* try to merge w/ next segment */
496 next = segment->m_snext;
497 if (next->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
499 assert(segment->m_addr + segment->m_size == next->m_addr);
500 segment->m_size += next->m_size;
502 /* remove from freelist */
503 rtl_arena_freelist_remove (arena, next);
505 /* remove from segment list */
506 QUEUE_REMOVE_NAMED(next, s);
508 /* release segment descriptor */
509 rtl_arena_segment_put (arena, &next);
512 /* try to merge w/ prev segment */
513 prev = segment->m_sprev;
514 if (prev->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
516 assert(prev->m_addr + prev->m_size == segment->m_addr);
517 segment->m_addr = prev->m_addr;
518 segment->m_size += prev->m_size;
520 /* remove from freelist */
521 rtl_arena_freelist_remove (arena, prev);
523 /* remove from segment list */
524 QUEUE_REMOVE_NAMED(prev, s);
526 /* release segment descriptor */
527 rtl_arena_segment_put (arena, &prev);
531 void rtl_arena_constructor(void * obj)
533 rtl_arena_type * arena = static_cast<rtl_arena_type*>(obj);
534 rtl_arena_segment_type * head;
535 size_t i;
537 memset (arena, 0, sizeof(rtl_arena_type));
539 QUEUE_START_NAMED(arena, arena_);
541 RTL_MEMORY_LOCK_INIT(&(arena->m_lock));
543 head = &(arena->m_segment_reserve_span_head);
544 rtl_arena_segment_constructor (head);
545 head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;
547 head = &(arena->m_segment_reserve_head);
548 rtl_arena_segment_constructor (head);
549 head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;
551 head = &(arena->m_segment_head);
552 rtl_arena_segment_constructor (head);
553 head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;
555 for (i = 0; i < RTL_ARENA_FREELIST_SIZE; i++)
557 head = &(arena->m_freelist_head[i]);
558 rtl_arena_segment_constructor (head);
560 head->m_size = ((sal_Size(1)) << i);
561 head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;
564 arena->m_hash_table = arena->m_hash_table_0;
565 arena->m_hash_size = RTL_ARENA_HASH_SIZE;
566 arena->m_hash_shift = highbit(arena->m_hash_size) - 1;
569 void rtl_arena_destructor(void * obj)
571 rtl_arena_type * arena = static_cast<rtl_arena_type*>(obj);
572 rtl_arena_segment_type * head;
573 size_t i;
575 assert(QUEUE_STARTED_NAMED(arena, arena_));
577 RTL_MEMORY_LOCK_DESTROY(&(arena->m_lock));
579 head = &(arena->m_segment_reserve_span_head);
580 assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
581 rtl_arena_segment_destructor (head);
583 head = &(arena->m_segment_reserve_head);
584 assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
585 rtl_arena_segment_destructor (head);
587 head = &(arena->m_segment_head);
588 assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
589 rtl_arena_segment_destructor (head);
591 for (i = 0; i < RTL_ARENA_FREELIST_SIZE; i++)
593 head = &(arena->m_freelist_head[i]);
595 assert(head->m_size == ((sal_Size(1)) << i));
596 assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
598 rtl_arena_segment_destructor (head);
601 assert(arena->m_hash_table == arena->m_hash_table_0);
602 assert(arena->m_hash_size == RTL_ARENA_HASH_SIZE);
603 assert(arena->m_hash_shift == highbit(arena->m_hash_size) - 1);
606 rtl_arena_type * rtl_arena_activate(
607 rtl_arena_type * arena,
608 const char * name,
609 sal_Size quantum,
610 rtl_arena_type * source_arena,
611 void * (SAL_CALL * source_alloc)(rtl_arena_type *, sal_Size *),
612 void (SAL_CALL * source_free) (rtl_arena_type *, void *, sal_Size)
615 assert(arena);
616 if (arena)
618 (void) snprintf (arena->m_name, sizeof(arena->m_name), "%s", name);
620 if (!RTL_MEMORY_ISP2(quantum))
622 /* roundup to next power of 2 */
623 quantum = ((sal_Size(1)) << highbit(quantum));
626 arena->m_quantum = quantum;
627 arena->m_quantum_shift = highbit(arena->m_quantum) - 1;
629 arena->m_source_arena = source_arena;
630 arena->m_source_alloc = source_alloc;
631 arena->m_source_free = source_free;
633 /* insert into arena list */
634 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
635 QUEUE_INSERT_TAIL_NAMED(&(g_arena_list.m_arena_head), arena, arena_);
636 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));
638 return arena;
641 void rtl_arena_deactivate(rtl_arena_type * arena)
643 rtl_arena_segment_type * head, * segment;
645 /* remove from arena list */
646 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
647 QUEUE_REMOVE_NAMED(arena, arena_);
648 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));
650 /* check for leaked segments */
651 if (arena->m_stats.m_alloc > arena->m_stats.m_free)
653 sal_Size i, n;
655 /* cleanup still used segment(s) */
656 for (i = 0, n = arena->m_hash_size; i < n; i++)
658 while ((segment = arena->m_hash_table[i]))
660 /* pop from hash table */
661 arena->m_hash_table[i] = segment->m_fnext;
662 segment->m_fnext = segment->m_fprev = segment;
664 /* coalesce w/ adjacent free segment(s) */
665 rtl_arena_segment_coalesce (arena, segment);
667 /* insert onto freelist */
668 rtl_arena_freelist_insert (arena, segment);
673 /* cleanup hash table */
674 if (arena->m_hash_table != arena->m_hash_table_0)
676 rtl_arena_free(
677 gp_arena_arena,
678 arena->m_hash_table,
679 arena->m_hash_size * sizeof(rtl_arena_segment_type*));
681 arena->m_hash_table = arena->m_hash_table_0;
682 arena->m_hash_size = RTL_ARENA_HASH_SIZE;
683 arena->m_hash_shift = highbit(arena->m_hash_size) - 1;
686 /* cleanup segment list */
687 head = &(arena->m_segment_head);
688 for (segment = head->m_snext; segment != head; segment = head->m_snext)
690 if (segment->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
692 /* remove from freelist */
693 rtl_arena_freelist_remove (arena, segment);
695 else
697 /* can have only free and span segments here */
698 assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN);
701 /* remove from segment list */
702 QUEUE_REMOVE_NAMED(segment, s);
704 /* release segment descriptor */
705 rtl_arena_segment_put (arena, &segment);
708 /* cleanup segment reserve list */
709 head = &(arena->m_segment_reserve_head);
710 for (segment = head->m_snext; segment != head; segment = head->m_snext)
712 /* remove from segment list */
713 QUEUE_REMOVE_NAMED(segment, s);
716 /* cleanup segment reserve span(s) */
717 head = &(arena->m_segment_reserve_span_head);
718 for (segment = head->m_snext; segment != head; segment = head->m_snext)
720 /* can have only span segments here */
721 assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN);
723 /* remove from segment list */
724 QUEUE_REMOVE_NAMED(segment, s);
726 /* return span to g_machdep_arena */
727 rtl_machdep_free (gp_machdep_arena, reinterpret_cast<void*>(segment->m_addr), segment->m_size);
731 } // namespace
733 rtl_arena_type * SAL_CALL rtl_arena_create(
734 const char * name,
735 sal_Size quantum,
736 sal_Size,
737 rtl_arena_type * source_arena,
738 void * (SAL_CALL * source_alloc)(rtl_arena_type *, sal_Size *),
739 void (SAL_CALL * source_free) (rtl_arena_type *, void *, sal_Size),
740 SAL_UNUSED_PARAMETER int
741 ) SAL_THROW_EXTERN_C()
743 rtl_arena_type * result = nullptr;
744 sal_Size size = sizeof(rtl_arena_type);
746 try_alloc:
747 result = static_cast<rtl_arena_type*>(rtl_arena_alloc (gp_arena_arena, &size));
748 if (result)
750 rtl_arena_type * arena = result;
751 rtl_arena_constructor (arena);
753 if (!source_arena)
755 assert(gp_default_arena);
756 source_arena = gp_default_arena;
759 result = rtl_arena_activate (
760 arena,
761 name,
762 quantum,
763 source_arena,
764 source_alloc,
765 source_free
768 if (!result)
770 rtl_arena_deactivate (arena);
771 rtl_arena_destructor (arena);
772 rtl_arena_free (gp_arena_arena, arena, size);
775 else if (!gp_arena_arena)
777 ensureArenaSingleton();
778 if (gp_arena_arena)
780 /* try again */
781 goto try_alloc;
784 return result;
787 void SAL_CALL rtl_arena_destroy(rtl_arena_type * arena) SAL_THROW_EXTERN_C()
789 if (arena)
791 rtl_arena_deactivate (arena);
792 rtl_arena_destructor (arena);
793 rtl_arena_free (gp_arena_arena, arena, sizeof(rtl_arena_type));
797 void * SAL_CALL rtl_arena_alloc(
798 rtl_arena_type * arena,
799 sal_Size * pSize
800 ) SAL_THROW_EXTERN_C()
802 void * addr = nullptr;
804 if (arena && pSize)
806 sal_Size size;
808 size = RTL_MEMORY_ALIGN(*pSize, arena->m_quantum);
809 if (size > 0)
811 /* allocate from segment list */
812 rtl_arena_segment_type *segment = nullptr;
814 RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
815 if (rtl_arena_segment_alloc (arena, size, &segment) ||
816 rtl_arena_segment_create(arena, size, &segment) )
818 /* shrink to fit */
819 sal_Size oversize;
821 /* mark segment used */
822 assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_FREE);
823 segment->m_type = RTL_ARENA_SEGMENT_TYPE_USED;
825 /* resize */
826 assert(segment->m_size >= size);
827 oversize = segment->m_size - size;
828 if (oversize >= arena->m_quantum)
830 rtl_arena_segment_type * remainder = nullptr;
831 rtl_arena_segment_get (arena, &remainder);
832 if (remainder)
834 segment->m_size = size;
836 remainder->m_addr = segment->m_addr + segment->m_size;
837 remainder->m_size = oversize;
838 remainder->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;
839 QUEUE_INSERT_HEAD_NAMED(segment, remainder, s);
841 rtl_arena_freelist_insert (arena, remainder);
845 rtl_arena_hash_insert (arena, segment);
847 (*pSize) = segment->m_size;
848 addr = reinterpret_cast<void*>(segment->m_addr);
850 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
853 return addr;
856 void SAL_CALL rtl_arena_free (
857 rtl_arena_type * arena,
858 void * addr,
859 sal_Size size
860 ) SAL_THROW_EXTERN_C()
862 if (arena)
864 size = RTL_MEMORY_ALIGN(size, arena->m_quantum);
865 if (size > 0)
867 /* free to segment list */
868 rtl_arena_segment_type * segment;
870 RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
872 segment = rtl_arena_hash_remove (arena, reinterpret_cast<sal_uIntPtr>(addr), size);
873 if (segment)
875 rtl_arena_segment_type *next, *prev;
877 /* coalesce w/ adjacent free segment(s) */
878 rtl_arena_segment_coalesce (arena, segment);
880 /* determine (new) next and prev segment */
881 next = segment->m_snext;
882 prev = segment->m_sprev;
884 /* entire span free when prev is a span, and next is either a span or a list head */
885 if (prev->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN &&
886 ((next->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN) ||
887 (next->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD)))
889 assert(
890 prev->m_addr == segment->m_addr
891 && prev->m_size == segment->m_size);
893 if (arena->m_source_free)
895 addr = reinterpret_cast<void*>(prev->m_addr);
896 size = prev->m_size;
898 /* remove from segment list */
899 QUEUE_REMOVE_NAMED(segment, s);
901 /* release segment descriptor */
902 rtl_arena_segment_put (arena, &segment);
904 /* remove from segment list */
905 QUEUE_REMOVE_NAMED(prev, s);
907 /* release (span) segment descriptor */
908 rtl_arena_segment_put (arena, &prev);
910 /* update stats, return span to source arena */
911 arena->m_stats.m_mem_total -= size;
912 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
914 (arena->m_source_free)(arena->m_source_arena, addr, size);
915 return;
919 /* insert onto freelist */
920 rtl_arena_freelist_insert (arena, segment);
923 RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
928 void rtl_arena_foreach (rtl_arena_type *arena, ArenaForeachFn foreachFn)
930 /* used segments */
931 for (int i = 0, n = arena->m_hash_size; i < n; i++)
933 for (rtl_arena_segment_type *segment = arena->m_hash_table[i];
934 segment != nullptr; segment = segment->m_fnext)
936 foreachFn(reinterpret_cast<void *>(segment->m_addr),
937 segment->m_size);
942 #if defined(SAL_UNX)
943 #include <sys/mman.h>
944 #elif defined(_WIN32)
945 #define MAP_FAILED nullptr
946 #endif /* SAL_UNX || _WIN32 */
948 namespace
951 void * rtl_machdep_alloc(
952 rtl_arena_type * pArena,
953 sal_Size * pSize
956 void * addr;
957 sal_Size size = *pSize;
959 assert(pArena == gp_machdep_arena);
961 #if defined(__sun) && defined(SPARC)
962 /* see @ mmap(2) man pages */
963 size += (pArena->m_quantum + pArena->m_quantum); /* "red-zone" pages */
964 if (size > (4 << 20))
965 size = RTL_MEMORY_P2ROUNDUP(size, (4 << 20));
966 else if (size > (512 << 10))
967 size = RTL_MEMORY_P2ROUNDUP(size, (512 << 10));
968 else
969 size = RTL_MEMORY_P2ROUNDUP(size, (64 << 10));
970 size -= (pArena->m_quantum + pArena->m_quantum); /* "red-zone" pages */
971 #else
972 /* default allocation granularity */
973 if (pArena->m_quantum < (64 << 10))
975 size = RTL_MEMORY_P2ROUNDUP(size, (64 << 10));
977 else
979 size = RTL_MEMORY_P2ROUNDUP(size, pArena->m_quantum);
981 #endif
983 #if defined(SAL_UNX)
984 addr = mmap (nullptr, static_cast<size_t>(size), PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON, -1, 0);
985 #elif defined(_WIN32)
986 addr = VirtualAlloc (nullptr, static_cast<SIZE_T>(size), MEM_COMMIT, PAGE_READWRITE);
987 #endif /* (SAL_UNX || _WIN32) */
989 if (addr != MAP_FAILED)
991 pArena->m_stats.m_alloc += 1;
992 pArena->m_stats.m_mem_total += size;
993 pArena->m_stats.m_mem_alloc += size;
995 (*pSize) = size;
996 return addr;
998 return nullptr;
1001 void rtl_machdep_free(
1002 rtl_arena_type * pArena,
1003 void * pAddr,
1004 sal_Size nSize
1007 assert(pArena == gp_machdep_arena);
1009 pArena->m_stats.m_free += 1;
1010 pArena->m_stats.m_mem_total -= nSize;
1011 pArena->m_stats.m_mem_alloc -= nSize;
1013 #if defined(SAL_UNX)
1014 (void) munmap(pAddr, nSize);
1015 #elif defined(_WIN32)
1016 (void) VirtualFree (pAddr, SIZE_T(0), MEM_RELEASE);
1017 #endif /* (SAL_UNX || _WIN32) */
1020 sal_Size rtl_machdep_pagesize()
1022 #if defined(SAL_UNX)
1023 #if defined(FREEBSD) || defined(NETBSD) || defined(DRAGONFLY)
1024 return (sal_Size)getpagesize();
1025 #else /* POSIX */
1026 return static_cast<sal_Size>(sysconf(_SC_PAGESIZE));
1027 #endif /* xBSD || POSIX */
1028 #elif defined(_WIN32)
1029 SYSTEM_INFO info;
1030 GetSystemInfo (&info);
1031 return static_cast<sal_Size>(info.dwPageSize);
1032 #endif /* (SAL_UNX || _WIN32) */
1035 } //namespace
1037 void rtl_arena_init()
1040 /* list of arenas */
1041 RTL_MEMORY_LOCK_INIT(&(g_arena_list.m_lock));
1042 rtl_arena_constructor (&(g_arena_list.m_arena_head));
1045 /* machdep (pseudo) arena */
1046 static rtl_arena_type g_machdep_arena;
1048 assert(!gp_machdep_arena);
1049 rtl_arena_constructor (&g_machdep_arena);
1051 gp_machdep_arena = rtl_arena_activate (
1052 &g_machdep_arena,
1053 "rtl_machdep_arena",
1054 rtl_machdep_pagesize(),
1055 nullptr, nullptr, nullptr /* no source */
1057 assert(gp_machdep_arena);
1060 /* default arena */
1061 static rtl_arena_type g_default_arena;
1063 assert(!gp_default_arena);
1064 rtl_arena_constructor (&g_default_arena);
1066 gp_default_arena = rtl_arena_activate (
1067 &g_default_arena,
1068 "rtl_default_arena",
1069 rtl_machdep_pagesize(),
1070 gp_machdep_arena, /* source */
1071 rtl_machdep_alloc,
1072 rtl_machdep_free
1074 assert(gp_default_arena);
1077 /* arena internal arena */
1078 static rtl_arena_type g_arena_arena;
1080 assert(!gp_arena_arena);
1081 rtl_arena_constructor (&g_arena_arena);
1083 gp_arena_arena = rtl_arena_activate(
1084 &g_arena_arena,
1085 "rtl_arena_internal_arena",
1086 64, /* quantum */
1087 gp_default_arena, /* source */
1088 rtl_arena_alloc,
1089 rtl_arena_free
1091 assert(gp_arena_arena);
1095 void rtl_arena_fini()
1097 if (gp_arena_arena)
1099 rtl_arena_type * arena, * head;
1101 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
1102 head = &(g_arena_list.m_arena_head);
1104 for (arena = head->m_arena_next; arena != head; arena = arena->m_arena_next)
1106 // noop
1108 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));
1112 /* vim:set shiftwidth=4 softtabstop=4 expandtab: */