/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
 * This file is part of the LibreOffice project.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * This file incorporates work covered by the following license notice:
 *
 *   Licensed to the Apache Software Foundation (ASF) under one or more
 *   contributor license agreements. See the NOTICE file distributed
 *   with this work for additional information regarding copyright
 *   ownership. The ASF licenses this file to you under the Apache
 *   License, Version 2.0 (the "License"); you may not use this file
 *   except in compliance with the License. You may obtain a copy of
 *   the License at http://www.apache.org/licenses/LICENSE-2.0 .
 */

#include <sal/config.h>

#include "alloc_arena.hxx"

#include "alloc_impl.hxx"
#include <rtllifecycle.h>
#include <sal/macros.h>

#include <cassert>
#include <string.h>
#include <stdio.h>
struct rtl_arena_list_st
{
    rtl_memory_lock_type m_lock;
    rtl_arena_type       m_arena_head;
};

static rtl_arena_list_st g_arena_list;

/**
    provided for arena_type allocations, and hash_table resizing.
*/
static rtl_arena_type * gp_arena_arena = nullptr;
/**
    Low level virtual memory (pseudo) arena
    (platform dependent implementation)
*/
static rtl_arena_type * gp_machdep_arena = nullptr;

rtl_arena_type * gp_default_arena = nullptr;
void * rtl_machdep_alloc(
    rtl_arena_type * pArena,
    sal_Size *       pSize
);

void rtl_machdep_free(
    rtl_arena_type * pArena,
    void *           pAddr,
    sal_Size         nSize
);

sal_Size rtl_machdep_pagesize();
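
/*
    Note (inferred from the code below, not stated in the original sources): each
    rtl_arena_segment_type participates in two independent queues via the
    QUEUE_*_NAMED macros.  The 's' links keep an arena's segments in address
    order (used for coalescing and span bookkeeping), while the 'f' links are
    reused either as freelist linkage or as the hash-chain linkage of allocated
    segments.
*/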
void rtl_arena_segment_constructor(void * obj)
{
    rtl_arena_segment_type * segment = static_cast<rtl_arena_segment_type*>(obj);

    QUEUE_START_NAMED(segment, s);
    QUEUE_START_NAMED(segment, f);
}
void rtl_arena_segment_destructor(void * obj)
{
    rtl_arena_segment_type * segment = static_cast< rtl_arena_segment_type * >(
        obj);
    assert(QUEUE_STARTED_NAMED(segment, s));
    assert(QUEUE_STARTED_NAMED(segment, f));
    (void) segment; // avoid warnings
}
/**
    @precond arena->m_lock acquired.
*/
bool rtl_arena_segment_populate(rtl_arena_type * arena)
{
    rtl_arena_segment_type *span;
    sal_Size                size = rtl_machdep_pagesize();

    span = static_cast< rtl_arena_segment_type * >(
        rtl_machdep_alloc(gp_machdep_arena, &size));
    if (span)
    {
        rtl_arena_segment_type *first, *last, *head;
        sal_Size                count = size / sizeof(rtl_arena_segment_type);

        /* insert onto reserve span list */
        QUEUE_INSERT_TAIL_NAMED(&(arena->m_segment_reserve_span_head), span, s);
        QUEUE_START_NAMED(span, f);
        span->m_addr = reinterpret_cast<sal_uIntPtr>(span);
        span->m_size = size;
        span->m_type = RTL_ARENA_SEGMENT_TYPE_SPAN;

        /* insert onto reserve list */
        head = &(arena->m_segment_reserve_head);
        for (first = span + 1, last = span + count; first < last; ++first)
        {
            QUEUE_INSERT_TAIL_NAMED(head, first, s);
            QUEUE_START_NAMED(first, f);
            first->m_addr = 0;
            first->m_size = 0;
            first->m_type = 0;
        }
    }
    return (span != nullptr);
}
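
/*
    Note: rtl_arena_segment_populate() carves one machdep page into segment
    descriptors.  The page itself is recorded as a SPAN on
    m_segment_reserve_span_head so rtl_arena_deactivate() can later hand it back
    through rtl_machdep_free().
*/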
/**
    @precond arena->m_lock acquired.
    @precond (*ppSegment == 0)
*/
void rtl_arena_segment_get(
    rtl_arena_type *          arena,
    rtl_arena_segment_type ** ppSegment
)
{
    rtl_arena_segment_type * head;

    assert(!*ppSegment);

    head = &(arena->m_segment_reserve_head);
    if (head->m_snext != head || rtl_arena_segment_populate (arena))
    {
        (*ppSegment) = head->m_snext;
        QUEUE_REMOVE_NAMED(*ppSegment, s);
    }
}
/**
    @precond arena->m_lock acquired.
    @postcond (*ppSegment == 0)
*/
void rtl_arena_segment_put(
    rtl_arena_type *          arena,
    rtl_arena_segment_type ** ppSegment
)
{
    rtl_arena_segment_type * head;

    assert(QUEUE_STARTED_NAMED(*ppSegment, s));
    assert(QUEUE_STARTED_NAMED(*ppSegment, f));

    (*ppSegment)->m_addr = 0;
    (*ppSegment)->m_size = 0;

    assert((*ppSegment)->m_type != RTL_ARENA_SEGMENT_TYPE_HEAD);
    (*ppSegment)->m_type = 0;

    /* keep as reserve */
    head = &(arena->m_segment_reserve_head);
    QUEUE_INSERT_HEAD_NAMED(head, (*ppSegment), s);

    (*ppSegment) = nullptr;
}
/**
    @precond arena->m_lock acquired.
*/
void rtl_arena_freelist_insert (
    rtl_arena_type *         arena,
    rtl_arena_segment_type * segment
)
{
    rtl_arena_segment_type * head;
    const auto bit = highbit(segment->m_size);
    assert(bit > 0);
    head = &(arena->m_freelist_head[bit - 1]);
    QUEUE_INSERT_TAIL_NAMED(head, segment, f);

    arena->m_freelist_bitmap |= head->m_size;
}
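
/*
    Note: m_freelist_bitmap mirrors which freelists are non-empty.  Each freelist
    head i carries m_size == (1 << i), so OR-ing head->m_size sets exactly the bit
    for that list; rtl_arena_freelist_remove() clears it again when the last
    segment leaves the list.
*/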
/**
    @precond arena->m_lock acquired.
*/
void rtl_arena_freelist_remove(
    rtl_arena_type *         arena,
    rtl_arena_segment_type * segment
)
{
    if (segment->m_fnext->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD &&
        segment->m_fprev->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD)
    {
        rtl_arena_segment_type * head;

        head = segment->m_fprev;
        assert(arena->m_freelist_bitmap & head->m_size);
        arena->m_freelist_bitmap ^= head->m_size;
    }
    QUEUE_REMOVE_NAMED(segment, f);
}
#define RTL_ARENA_HASH_INDEX_IMPL(a, s, q, m) \
     ((((a) + ((a) >> (s)) + ((a) >> ((s) << 1))) >> (q)) & (m))

#define RTL_ARENA_HASH_INDEX(arena, addr) \
    RTL_ARENA_HASH_INDEX_IMPL((addr), (arena)->m_hash_shift, (arena)->m_quantum_shift, ((arena)->m_hash_size - 1))
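
/*
    Note: the hash index mixes higher address bits back into the address
    (addr + (addr >> hash_shift) + (addr >> (2 * hash_shift))), discards the
    always-aligned low bits via the quantum shift, and masks the result to the
    power-of-two table size.
*/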
/**
    @precond arena->m_lock released.
*/
void rtl_arena_hash_rescale(
    rtl_arena_type * arena,
    sal_Size         new_size
)
{
    assert(new_size != 0);

    rtl_arena_segment_type ** new_table;
    sal_Size                  new_bytes;

    new_bytes = new_size * sizeof(rtl_arena_segment_type*);
    new_table = static_cast<rtl_arena_segment_type **>(rtl_arena_alloc (gp_arena_arena, &new_bytes));

    if (new_table)
    {
        rtl_arena_segment_type ** old_table;
        sal_Size                  old_size, i;

        memset (new_table, 0, new_bytes);

        RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));

        old_table = arena->m_hash_table;
        old_size  = arena->m_hash_size;

        arena->m_hash_table = new_table;
        arena->m_hash_size  = new_size;
        arena->m_hash_shift = highbit(arena->m_hash_size) - 1;

        for (i = 0; i < old_size; i++)
        {
            rtl_arena_segment_type * curr = old_table[i];
            while (curr)
            {
                rtl_arena_segment_type  * next = curr->m_fnext;
                rtl_arena_segment_type ** head;

                // coverity[negative_shift] - bogus
                head = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, curr->m_addr)]);
                curr->m_fnext = (*head);
                (*head) = curr;

                curr = next;
            }
            old_table[i] = nullptr;
        }

        RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));

        if (old_table != arena->m_hash_table_0)
        {
            sal_Size old_bytes = old_size * sizeof(rtl_arena_segment_type*);
            rtl_arena_free (gp_arena_arena, old_table, old_bytes);
        }
    }
}
/**
    Insert arena hash, and update stats.
*/
void rtl_arena_hash_insert(
    rtl_arena_type *         arena,
    rtl_arena_segment_type * segment
)
{
    rtl_arena_segment_type ** ppSegment;

    ppSegment = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, segment->m_addr)]);

    segment->m_fnext = (*ppSegment);
    (*ppSegment) = segment;

    arena->m_stats.m_alloc     += 1;
    arena->m_stats.m_mem_alloc += segment->m_size;
}
/**
    Remove arena hash, and update stats.
*/
rtl_arena_segment_type * rtl_arena_hash_remove(
    rtl_arena_type * arena,
    sal_uIntPtr      addr,
    sal_Size         size
)
{
    rtl_arena_segment_type *segment, **segpp;
    sal_Size lookups = 0;

    segpp = &(arena->m_hash_table[RTL_ARENA_HASH_INDEX(arena, addr)]);
    while ((segment = *segpp))
    {
        if (segment->m_addr == addr)
        {
            *segpp = segment->m_fnext;
            segment->m_fnext = segment->m_fprev = segment;
            break;
        }

        /* update lookup miss stats */
        lookups += 1;
        segpp = &(segment->m_fnext);
    }

    assert(segment); // bad free
    if (segment)
    {
        assert(segment->m_size == size);
        (void) size; // avoid warnings

        arena->m_stats.m_free      += 1;
        arena->m_stats.m_mem_alloc -= segment->m_size;

        if (lookups > 1)
        {
            sal_Size nseg = static_cast<sal_Size>(arena->m_stats.m_alloc - arena->m_stats.m_free);
            if (nseg > 4 * arena->m_hash_size)
            {
                if (!(arena->m_flags & RTL_ARENA_FLAG_RESCALE))
                {
                    sal_Size ave = nseg >> arena->m_hash_shift;
                    assert(ave != 0);
                    sal_Size new_size = arena->m_hash_size << (highbit(ave) - 1);

                    arena->m_flags |= RTL_ARENA_FLAG_RESCALE;
                    RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
                    rtl_arena_hash_rescale (arena, new_size);
                    RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
                    arena->m_flags &= ~RTL_ARENA_FLAG_RESCALE;
                }
            }
        }
    }

    return segment;
}
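
/*
    Note on resizing: the hash table only grows, and only on a free whose hash
    chain needed several probes while the arena tracks more than four live
    segments per bucket on average.  The new size multiplies the table by the
    observed average chain length rounded down to a power of two, and
    RTL_ARENA_FLAG_RESCALE keeps the rescale from being re-entered while the lock
    is dropped.
*/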
/**
    allocate (and remove) segment from freelist

    @precond arena->m_lock acquired
    @precond (*ppSegment == 0)
*/
bool rtl_arena_segment_alloc(
    rtl_arena_type *          arena,
    sal_Size                  size,
    rtl_arena_segment_type ** ppSegment
)
{
    int index = 0;

    assert(!*ppSegment);
    if (!RTL_MEMORY_ISP2(size))
    {
        unsigned int msb = highbit(size);
        if (RTL_ARENA_FREELIST_SIZE == msb)
        {
            /* highest possible freelist: fall back to first fit */
            rtl_arena_segment_type *head, *segment;

            head = &(arena->m_freelist_head[msb - 1]);
            for (segment = head->m_fnext; segment != head; segment = segment->m_fnext)
            {
                if (segment->m_size >= size)
                {
                    /* allocate first fit segment */
                    (*ppSegment) = segment;
                    break;
                }
            }
            goto dequeue_and_leave;
        }

        /* roundup to next power of 2 */
        size = ((sal_Size(1)) << msb);
    }

    index = lowbit(RTL_MEMORY_P2ALIGN(arena->m_freelist_bitmap, size));
    if (index > 0)
    {
        /* instant fit: allocate first free segment */
        rtl_arena_segment_type *head;

        head = &(arena->m_freelist_head[index - 1]);
        (*ppSegment) = head->m_fnext;
        assert((*ppSegment) != head);
    }

dequeue_and_leave:
    if (*ppSegment)
    {
        /* remove from freelist */
        rtl_arena_freelist_remove (arena, (*ppSegment));
    }
    return (*ppSegment != nullptr);
}
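
/*
    Worked example (illustrative numbers, assuming the alloc_impl.hxx helpers are
    the usual power-of-two bit utilities): a request rounded up to 256 == 2^8
    masks the freelist bitmap with RTL_MEMORY_P2ALIGN(bitmap, 256), clearing the
    bits of all freelists that hold segments smaller than 256.  If freelists 5 and
    9 are non-empty (bitmap 0x220), the masked value is 0x200, lowbit() yields 10,
    and m_freelist_head[9] is used; every segment on that list is at least 512
    bytes, so its first entry fits without searching ("instant fit").
*/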
/**
    import new (span) segment from source arena

    @precond arena->m_lock acquired
    @precond (*ppSegment == 0)
*/
bool rtl_arena_segment_create(
    rtl_arena_type *          arena,
    sal_Size                  size,
    rtl_arena_segment_type ** ppSegment
)
{
    assert(!*ppSegment);
    if (arena->m_source_alloc)
    {
        rtl_arena_segment_get (arena, ppSegment);
        if (*ppSegment)
        {
            rtl_arena_segment_type * span = nullptr;
            rtl_arena_segment_get (arena, &span);
            if (span)
            {
                /* import new span from source arena */
                RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));

                span->m_size = size;
                span->m_addr = reinterpret_cast<sal_uIntPtr>(
                    (arena->m_source_alloc)(
                        arena->m_source_arena, &(span->m_size)));

                RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
                if (span->m_addr != 0)
                {
                    /* insert onto segment list, update stats */
                    span->m_type = RTL_ARENA_SEGMENT_TYPE_SPAN;
                    QUEUE_INSERT_HEAD_NAMED(&(arena->m_segment_head), span, s);
                    arena->m_stats.m_mem_total += span->m_size;

                    (*ppSegment)->m_addr = span->m_addr;
                    (*ppSegment)->m_size = span->m_size;
                    (*ppSegment)->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;
                    QUEUE_INSERT_HEAD_NAMED(span, (*ppSegment), s);

                    /* report success */
                    return true;
                }
                rtl_arena_segment_put (arena, &span);
            }
            rtl_arena_segment_put (arena, ppSegment);
        }
    }
    return false; // failure
}
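
/*
    Note: two descriptors are taken from the reserve before the lock is dropped:
    one describes the imported span itself, the other the free segment handed
    back to the caller.  If the source allocation fails, both descriptors are
    returned via rtl_arena_segment_put() and the function reports failure.
*/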
/**
    mark as free and join with adjacent free segment(s)

    @precond arena->m_lock acquired
    @precond segment marked 'used'
*/
void rtl_arena_segment_coalesce(
    rtl_arena_type *         arena,
    rtl_arena_segment_type * segment
)
{
    rtl_arena_segment_type *next, *prev;

    /* mark segment free */
    assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_USED);
    segment->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;

    /* try to merge w/ next segment */
    next = segment->m_snext;
    if (next->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
    {
        assert(segment->m_addr + segment->m_size == next->m_addr);
        segment->m_size += next->m_size;

        /* remove from freelist */
        rtl_arena_freelist_remove (arena, next);

        /* remove from segment list */
        QUEUE_REMOVE_NAMED(next, s);

        /* release segment descriptor */
        rtl_arena_segment_put (arena, &next);
    }

    /* try to merge w/ prev segment */
    prev = segment->m_sprev;
    if (prev->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
    {
        assert(prev->m_addr + prev->m_size == segment->m_addr);
        segment->m_addr  = prev->m_addr;
        segment->m_size += prev->m_size;

        /* remove from freelist */
        rtl_arena_freelist_remove (arena, prev);

        /* remove from segment list */
        QUEUE_REMOVE_NAMED(prev, s);

        /* release segment descriptor */
        rtl_arena_segment_put (arena, &prev);
    }
}
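
/*
    Note: neighbours are found through the address-ordered 's' links, so merging
    is O(1); the asserts double-check that the neighbouring segments really are
    byte-adjacent before their sizes are folded together.
*/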
void rtl_arena_constructor(void * obj)
{
    rtl_arena_type * arena = static_cast<rtl_arena_type*>(obj);
    rtl_arena_segment_type * head;
    size_t i;

    memset (arena, 0, sizeof(rtl_arena_type));

    QUEUE_START_NAMED(arena, arena_);

    RTL_MEMORY_LOCK_INIT(&(arena->m_lock));

    head = &(arena->m_segment_reserve_span_head);
    rtl_arena_segment_constructor (head);
    head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;

    head = &(arena->m_segment_reserve_head);
    rtl_arena_segment_constructor (head);
    head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;

    head = &(arena->m_segment_head);
    rtl_arena_segment_constructor (head);
    head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;

    for (i = 0; i < RTL_ARENA_FREELIST_SIZE; i++)
    {
        head = &(arena->m_freelist_head[i]);
        rtl_arena_segment_constructor (head);

        head->m_size = ((sal_Size(1)) << i);
        head->m_type = RTL_ARENA_SEGMENT_TYPE_HEAD;
    }

    arena->m_hash_table = arena->m_hash_table_0;
    arena->m_hash_size  = RTL_ARENA_HASH_SIZE;
    arena->m_hash_shift = highbit(arena->m_hash_size) - 1;
}
void rtl_arena_destructor(void * obj)
{
    rtl_arena_type * arena = static_cast<rtl_arena_type*>(obj);
    rtl_arena_segment_type * head;
    size_t i;

    assert(QUEUE_STARTED_NAMED(arena, arena_));

    RTL_MEMORY_LOCK_DESTROY(&(arena->m_lock));

    head = &(arena->m_segment_reserve_span_head);
    assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
    rtl_arena_segment_destructor (head);

    head = &(arena->m_segment_reserve_head);
    assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
    rtl_arena_segment_destructor (head);

    head = &(arena->m_segment_head);
    assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);
    rtl_arena_segment_destructor (head);

    for (i = 0; i < RTL_ARENA_FREELIST_SIZE; i++)
    {
        head = &(arena->m_freelist_head[i]);

        assert(head->m_size == ((sal_Size(1)) << i));
        assert(head->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD);

        rtl_arena_segment_destructor (head);
    }

    assert(arena->m_hash_table == arena->m_hash_table_0);
    assert(arena->m_hash_size  == RTL_ARENA_HASH_SIZE);
    assert(arena->m_hash_shift == highbit(arena->m_hash_size) - 1);
}
rtl_arena_type * rtl_arena_activate(
    rtl_arena_type *   arena,
    const char *       name,
    sal_Size           quantum,
    rtl_arena_type *   source_arena,
    void * (SAL_CALL * source_alloc)(rtl_arena_type *, sal_Size *),
    void   (SAL_CALL * source_free) (rtl_arena_type *, void *, sal_Size)
)
{
    assert(arena);

    (void) snprintf (arena->m_name, sizeof(arena->m_name), "%s", name);

    if (!RTL_MEMORY_ISP2(quantum))
    {
        /* roundup to next power of 2 */
        quantum = ((sal_Size(1)) << highbit(quantum));
    }

    arena->m_quantum = quantum;
    arena->m_quantum_shift = highbit(arena->m_quantum) - 1;

    arena->m_source_arena = source_arena;
    arena->m_source_alloc = source_alloc;
    arena->m_source_free  = source_free;

    /* insert into arena list */
    RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
    QUEUE_INSERT_TAIL_NAMED(&(g_arena_list.m_arena_head), arena, arena_);
    RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));

    return arena;
}
void rtl_arena_deactivate(rtl_arena_type * arena)
{
    rtl_arena_segment_type * head, * segment;

    /* remove from arena list */
    RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
    QUEUE_REMOVE_NAMED(arena, arena_);
    RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));

    /* check for leaked segments */
    if (arena->m_stats.m_alloc > arena->m_stats.m_free)
    {
        sal_Size i, n;

        /* cleanup still used segment(s) */
        for (i = 0, n = arena->m_hash_size; i < n; i++)
        {
            while ((segment = arena->m_hash_table[i]))
            {
                /* pop from hash table */
                arena->m_hash_table[i] = segment->m_fnext;
                segment->m_fnext = segment->m_fprev = segment;

                /* coalesce w/ adjacent free segment(s) */
                rtl_arena_segment_coalesce (arena, segment);

                /* insert onto freelist */
                rtl_arena_freelist_insert (arena, segment);
            }
        }
    }

    /* cleanup hash table */
    if (arena->m_hash_table != arena->m_hash_table_0)
    {
        rtl_arena_free (
            gp_arena_arena,
            arena->m_hash_table,
            arena->m_hash_size * sizeof(rtl_arena_segment_type*));

        arena->m_hash_table = arena->m_hash_table_0;
        arena->m_hash_size  = RTL_ARENA_HASH_SIZE;
        arena->m_hash_shift = highbit(arena->m_hash_size) - 1;
    }

    /* cleanup segment list */
    head = &(arena->m_segment_head);
    for (segment = head->m_snext; segment != head; segment = head->m_snext)
    {
        if (segment->m_type == RTL_ARENA_SEGMENT_TYPE_FREE)
        {
            /* remove from freelist */
            rtl_arena_freelist_remove (arena, segment);
        }
        else
        {
            /* can have only free and span segments here */
            assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN);
        }

        /* remove from segment list */
        QUEUE_REMOVE_NAMED(segment, s);

        /* release segment descriptor */
        rtl_arena_segment_put (arena, &segment);
    }

    /* cleanup segment reserve list */
    head = &(arena->m_segment_reserve_head);
    for (segment = head->m_snext; segment != head; segment = head->m_snext)
    {
        /* remove from segment list */
        QUEUE_REMOVE_NAMED(segment, s);
    }

    /* cleanup segment reserve span(s) */
    head = &(arena->m_segment_reserve_span_head);
    for (segment = head->m_snext; segment != head; segment = head->m_snext)
    {
        /* can have only span segments here */
        assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN);

        /* remove from segment list */
        QUEUE_REMOVE_NAMED(segment, s);

        /* return span to g_machdep_arena */
        rtl_machdep_free (gp_machdep_arena, reinterpret_cast<void*>(segment->m_addr), segment->m_size);
    }
}
rtl_arena_type * SAL_CALL rtl_arena_create(
    const char *       name,
    sal_Size           quantum,
    SAL_UNUSED_PARAMETER sal_Size,
    rtl_arena_type *   source_arena,
    void * (SAL_CALL * source_alloc)(rtl_arena_type *, sal_Size *),
    void   (SAL_CALL * source_free) (rtl_arena_type *, void *, sal_Size),
    SAL_UNUSED_PARAMETER int
) SAL_THROW_EXTERN_C()
{
    rtl_arena_type * result = nullptr;
    sal_Size         size   = sizeof(rtl_arena_type);

try_alloc:
    result = static_cast<rtl_arena_type*>(rtl_arena_alloc (gp_arena_arena, &size));
    if (result)
    {
        rtl_arena_type * arena = result;
        rtl_arena_constructor (arena);

        if (!source_arena)
        {
            assert(gp_default_arena);
            source_arena = gp_default_arena;
        }

        result = rtl_arena_activate (
            arena,
            name,
            quantum,
            source_arena,
            source_alloc,
            source_free
        );

        if (!result)
        {
            rtl_arena_deactivate (arena);
            rtl_arena_destructor (arena);
            rtl_arena_free (gp_arena_arena, arena, size);
        }
    }
    else if (!gp_arena_arena)
    {
        ensureArenaSingleton();
        if (gp_arena_arena)
        {
            /* try again */
            goto try_alloc;
        }
    }
    return result;
}
void SAL_CALL rtl_arena_destroy(rtl_arena_type * arena) SAL_THROW_EXTERN_C()
{
    if (arena)
    {
        rtl_arena_deactivate (arena);
        rtl_arena_destructor (arena);
        rtl_arena_free (gp_arena_arena, arena, sizeof(rtl_arena_type));
    }
}
void * SAL_CALL rtl_arena_alloc(
    rtl_arena_type * arena,
    sal_Size *       pSize
) SAL_THROW_EXTERN_C()
{
    void * addr = nullptr;

    if (arena && pSize)
    {
        sal_Size size;

        size = RTL_MEMORY_ALIGN(*pSize, arena->m_quantum);
        if (size > 0)
        {
            /* allocate from segment list */
            rtl_arena_segment_type *segment = nullptr;

            RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));
            if (rtl_arena_segment_alloc (arena, size, &segment) ||
                rtl_arena_segment_create(arena, size, &segment)    )
            {
                /* shrink to fit */
                sal_Size oversize;

                /* mark segment used */
                assert(segment->m_type == RTL_ARENA_SEGMENT_TYPE_FREE);
                segment->m_type = RTL_ARENA_SEGMENT_TYPE_USED;

                /* resize */
                assert(segment->m_size >= size);
                oversize = segment->m_size - size;
                if (oversize >= arena->m_quantum)
                {
                    rtl_arena_segment_type * remainder = nullptr;
                    rtl_arena_segment_get (arena, &remainder);
                    if (remainder)
                    {
                        segment->m_size = size;

                        remainder->m_addr = segment->m_addr + segment->m_size;
                        remainder->m_size = oversize;
                        remainder->m_type = RTL_ARENA_SEGMENT_TYPE_FREE;
                        QUEUE_INSERT_HEAD_NAMED(segment, remainder, s);

                        rtl_arena_freelist_insert (arena, remainder);
                    }
                }

                rtl_arena_hash_insert (arena, segment);

                (*pSize) = segment->m_size;
                addr = reinterpret_cast<void*>(segment->m_addr);
            }
            RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
        }
    }
    return addr;
}
void SAL_CALL rtl_arena_free (
    rtl_arena_type * arena,
    void *           addr,
    sal_Size         size
) SAL_THROW_EXTERN_C()
{
    if (arena)
    {
        size = RTL_MEMORY_ALIGN(size, arena->m_quantum);
        if (size > 0)
        {
            /* free to segment list */
            rtl_arena_segment_type * segment;

            RTL_MEMORY_LOCK_ACQUIRE(&(arena->m_lock));

            segment = rtl_arena_hash_remove (arena, reinterpret_cast<sal_uIntPtr>(addr), size);
            if (segment)
            {
                rtl_arena_segment_type *next, *prev;

                /* coalesce w/ adjacent free segment(s) */
                rtl_arena_segment_coalesce (arena, segment);

                /* determine (new) next and prev segment */
                next = segment->m_snext;
                prev = segment->m_sprev;

                /* entire span free when prev is a span, and next is either a span or a list head */
                if (prev->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN &&
                    ((next->m_type == RTL_ARENA_SEGMENT_TYPE_SPAN) ||
                     (next->m_type == RTL_ARENA_SEGMENT_TYPE_HEAD)))
                {
                    assert(
                        prev->m_addr == segment->m_addr
                        && prev->m_size == segment->m_size);

                    if (arena->m_source_free)
                    {
                        addr = reinterpret_cast<void*>(prev->m_addr);
                        size = prev->m_size;

                        /* remove from segment list */
                        QUEUE_REMOVE_NAMED(segment, s);

                        /* release segment descriptor */
                        rtl_arena_segment_put (arena, &segment);

                        /* remove from segment list */
                        QUEUE_REMOVE_NAMED(prev, s);

                        /* release (span) segment descriptor */
                        rtl_arena_segment_put (arena, &prev);

                        /* update stats, return span to source arena */
                        arena->m_stats.m_mem_total -= size;
                        RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));

                        (arena->m_source_free)(arena->m_source_arena, addr, size);
                        return;
                    }
                }

                /* insert onto freelist */
                rtl_arena_freelist_insert (arena, segment);
            }

            RTL_MEMORY_LOCK_RELEASE(&(arena->m_lock));
        }
    }
}
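
/*
    Usage sketch (illustrative only; the sub-arena name and sizes are made up,
    and the source callbacks are the same ones rtl_arena_init() wires to its
    internal arena below):

        sal_Size size = 100;
        rtl_arena_type * pool = rtl_arena_create(
            "example_arena", 64, 0, nullptr, rtl_arena_alloc, rtl_arena_free, 0);
        void * p = rtl_arena_alloc(pool, &size);  // size is rounded up; keep it
        // ... use p ...
        rtl_arena_free(pool, p, size);            // pass the adjusted size back
        rtl_arena_destroy(pool);
*/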
void rtl_arena_foreach (rtl_arena_type *arena, ArenaForeachFn foreachFn)
{
    /* used segments */
    for (int i = 0, n = arena->m_hash_size; i < n; i++)
    {
        for (rtl_arena_segment_type *segment = arena->m_hash_table[i];
             segment != nullptr; segment = segment->m_fnext)
        {
            foreachFn(reinterpret_cast<void *>(segment->m_addr),
                      segment->m_size);
        }
    }
}
#if defined(SAL_UNX)
#include <sys/mman.h>
#elif defined(_WIN32)
#define MAP_FAILED nullptr
#endif /* SAL_UNX || _WIN32 */
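
/*
    Note: on Windows there is no MAP_FAILED; VirtualAlloc() reports failure by
    returning a null pointer, so MAP_FAILED is defined as nullptr to keep the
    single "addr != MAP_FAILED" check in rtl_machdep_alloc() working on both
    platforms.
*/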
void * rtl_machdep_alloc(
    rtl_arena_type * pArena,
    sal_Size *       pSize
)
{
    void *   addr;
    sal_Size size = *pSize;

    assert(pArena == gp_machdep_arena);

#if defined(__sun) && defined(SPARC)
    /* see @ mmap(2) man pages */
    size += (pArena->m_quantum + pArena->m_quantum); /* "red-zone" pages */
    if (size > (4 << 20))
        size = RTL_MEMORY_P2ROUNDUP(size, (4 << 20));
    else if (size > (512 << 10))
        size = RTL_MEMORY_P2ROUNDUP(size, (512 << 10));
    else
        size = RTL_MEMORY_P2ROUNDUP(size, (64 << 10));
    size -= (pArena->m_quantum + pArena->m_quantum); /* "red-zone" pages */
#else
    /* default allocation granularity */
    if (pArena->m_quantum < (64 << 10))
    {
        size = RTL_MEMORY_P2ROUNDUP(size, (64 << 10));
    }
    else
    {
        size = RTL_MEMORY_P2ROUNDUP(size, pArena->m_quantum);
    }
#endif

#if defined(SAL_UNX)
    addr = mmap (nullptr, static_cast<size_t>(size), PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON, -1, 0);
#elif defined(_WIN32)
    addr = VirtualAlloc (nullptr, static_cast<SIZE_T>(size), MEM_COMMIT, PAGE_READWRITE);
#endif /* (SAL_UNX || _WIN32) */

    if (addr != MAP_FAILED)
    {
        pArena->m_stats.m_alloc += 1;
        pArena->m_stats.m_mem_total += size;
        pArena->m_stats.m_mem_alloc += size;

        (*pSize) = size;
        return addr;
    }
    return nullptr;
}
void rtl_machdep_free(
    rtl_arena_type * pArena,
    void *           pAddr,
    sal_Size         nSize
)
{
    assert(pArena == gp_machdep_arena);

    pArena->m_stats.m_free      += 1;
    pArena->m_stats.m_mem_total -= nSize;
    pArena->m_stats.m_mem_alloc -= nSize;

#if defined(SAL_UNX)
    (void) munmap(pAddr, nSize);
#elif defined(_WIN32)
    (void) VirtualFree (pAddr, SIZE_T(0), MEM_RELEASE);
#endif /* (SAL_UNX || _WIN32) */
}
sal_Size rtl_machdep_pagesize()
{
#if defined(SAL_UNX)
#if defined(FREEBSD) || defined(NETBSD) || defined(DRAGONFLY)
    return (sal_Size)getpagesize();
#else  /* POSIX */
    return static_cast<sal_Size>(sysconf(_SC_PAGESIZE));
#endif /* xBSD || POSIX */
#elif defined(_WIN32)
    SYSTEM_INFO info;
    GetSystemInfo (&info);
    return static_cast<sal_Size>(info.dwPageSize);
#endif /* (SAL_UNX || _WIN32) */
}
void rtl_arena_init()
{
    {
        /* list of arenas */
        RTL_MEMORY_LOCK_INIT(&(g_arena_list.m_lock));
        rtl_arena_constructor (&(g_arena_list.m_arena_head));
    }
    {
        /* machdep (pseudo) arena */
        static rtl_arena_type g_machdep_arena;

        assert(!gp_machdep_arena);
        rtl_arena_constructor (&g_machdep_arena);

        gp_machdep_arena = rtl_arena_activate (
            &g_machdep_arena,
            "rtl_machdep_arena",
            rtl_machdep_pagesize(),
            nullptr, nullptr, nullptr /* no source */
        );
        assert(gp_machdep_arena);
    }
    {
        /* default arena */
        static rtl_arena_type g_default_arena;

        assert(!gp_default_arena);
        rtl_arena_constructor (&g_default_arena);

        gp_default_arena = rtl_arena_activate (
            &g_default_arena,
            "rtl_default_arena",
            rtl_machdep_pagesize(),
            gp_machdep_arena, /* source */
            rtl_machdep_alloc,
            rtl_machdep_free
        );
        assert(gp_default_arena);
    }
    {
        /* arena internal arena */
        static rtl_arena_type g_arena_arena;

        assert(!gp_arena_arena);
        rtl_arena_constructor (&g_arena_arena);

        gp_arena_arena = rtl_arena_activate(
            &g_arena_arena,
            "rtl_arena_internal_arena",
            64,               /* quantum */
            gp_default_arena, /* source */
            rtl_arena_alloc,
            rtl_arena_free
        );
        assert(gp_arena_arena);
    }
}
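
/*
    Note on bootstrap order: the machdep arena is backed directly by
    mmap()/VirtualAlloc() and has no source; the default arena draws whole pages
    from it; and the internal arena (used by rtl_arena_create() and
    rtl_arena_hash_rescale() above) draws from the default arena with a finer
    quantum.
*/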
void rtl_arena_fini()
{
    if (gp_arena_arena)
    {
        rtl_arena_type * arena, * head;

        RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list.m_lock));
        head = &(g_arena_list.m_arena_head);

        for (arena = head->m_arena_next; arena != head; arena = arena->m_arena_next)
        {
            // noop
        }
        RTL_MEMORY_LOCK_RELEASE(&(g_arena_list.m_lock));
    }
}

/* vim:set shiftwidth=4 softtabstop=4 expandtab: */