/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
 * This file is part of the LibreOffice project.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * This file incorporates work covered by the following license notice:
 *
 *   Licensed to the Apache Software Foundation (ASF) under one or more
 *   contributor license agreements. See the NOTICE file distributed
 *   with this work for additional information regarding copyright
 *   ownership. The ASF licenses this file to you under the Apache
 *   License, Version 2.0 (the "License"); you may not use this file
 *   except in compliance with the License. You may obtain a copy of
 *   the License at http://www.apache.org/licenses/LICENSE-2.0 .
 */
20 #include <sal/config.h>
22 #include "alloc_arena.hxx"
24 #include "alloc_impl.hxx"
25 #include <rtllifecycle.h>
40 struct rtl_arena_list_st
42 rtl_memory_lock_type m_lock
;
43 rtl_arena_type m_arena_head
;
48 static rtl_arena_list_st g_arena_list
;
51 provided for arena_type allocations, and hash_table resizing.
55 static rtl_arena_type
* gp_arena_arena
= nullptr;
58 Low level virtual memory (pseudo) arena
59 (platform dependent implementation)
63 static rtl_arena_type
* gp_machdep_arena
= nullptr;
65 rtl_arena_type
* gp_default_arena
= nullptr;
70 void * rtl_machdep_alloc(
71 rtl_arena_type
* pArena
,
75 void rtl_machdep_free(
76 rtl_arena_type
* pArena
,
81 sal_Size
rtl_machdep_pagesize();
83 void rtl_arena_segment_constructor(void * obj
)
85 rtl_arena_segment_type
* segment
= static_cast<rtl_arena_segment_type
*>(obj
);
87 QUEUE_START_NAMED(segment
, s
);
88 QUEUE_START_NAMED(segment
, f
);
91 void rtl_arena_segment_destructor(void * obj
)
93 rtl_arena_segment_type
* segment
= static_cast< rtl_arena_segment_type
* >(
95 assert(QUEUE_STARTED_NAMED(segment
, s
));
96 assert(QUEUE_STARTED_NAMED(segment
, f
));
97 (void) segment
; // avoid warnings
101 @precond arena->m_lock acquired.
103 bool rtl_arena_segment_populate(rtl_arena_type
* arena
)
105 rtl_arena_segment_type
*span
;
106 sal_Size size
= rtl_machdep_pagesize();
108 span
= static_cast< rtl_arena_segment_type
* >(
109 rtl_machdep_alloc(gp_machdep_arena
, &size
));
112 rtl_arena_segment_type
*first
, *last
, *head
;
113 sal_Size count
= size
/ sizeof(rtl_arena_segment_type
);
115 /* insert onto reserve span list */
116 QUEUE_INSERT_TAIL_NAMED(&(arena
->m_segment_reserve_span_head
), span
, s
);
117 QUEUE_START_NAMED(span
, f
);
118 span
->m_addr
= reinterpret_cast<sal_uIntPtr
>(span
);
120 span
->m_type
= RTL_ARENA_SEGMENT_TYPE_SPAN
;
122 /* insert onto reserve list */
123 head
= &(arena
->m_segment_reserve_head
);
124 for (first
= span
+ 1, last
= span
+ count
; first
< last
; ++first
)
126 QUEUE_INSERT_TAIL_NAMED(head
, first
, s
);
127 QUEUE_START_NAMED(first
, f
);
133 return (span
!= nullptr);
137 @precond arena->m_lock acquired.
138 @precond (*ppSegment == 0)
140 void rtl_arena_segment_get(
141 rtl_arena_type
* arena
,
142 rtl_arena_segment_type
** ppSegment
145 rtl_arena_segment_type
* head
;
149 head
= &(arena
->m_segment_reserve_head
);
150 if (head
->m_snext
!= head
|| rtl_arena_segment_populate (arena
))
152 (*ppSegment
) = head
->m_snext
;
153 QUEUE_REMOVE_NAMED(*ppSegment
, s
);
158 @precond arena->m_lock acquired.
159 @postcond (*ppSegment == 0)
161 void rtl_arena_segment_put(
162 rtl_arena_type
* arena
,
163 rtl_arena_segment_type
** ppSegment
166 rtl_arena_segment_type
* head
;
168 assert(QUEUE_STARTED_NAMED(*ppSegment
, s
));
169 assert(QUEUE_STARTED_NAMED(*ppSegment
, f
));
171 (*ppSegment
)->m_addr
= 0;
172 (*ppSegment
)->m_size
= 0;
174 assert((*ppSegment
)->m_type
!= RTL_ARENA_SEGMENT_TYPE_HEAD
);
175 (*ppSegment
)->m_type
= 0;
177 /* keep as reserve */
178 head
= &(arena
->m_segment_reserve_head
);
179 QUEUE_INSERT_HEAD_NAMED(head
, (*ppSegment
), s
);
182 (*ppSegment
) = nullptr;
186 @precond arena->m_lock acquired.
188 void rtl_arena_freelist_insert (
189 rtl_arena_type
* arena
,
190 rtl_arena_segment_type
* segment
193 rtl_arena_segment_type
* head
;
194 const auto bit
= highbit(segment
->m_size
);
196 head
= &(arena
->m_freelist_head
[bit
- 1]);
197 QUEUE_INSERT_TAIL_NAMED(head
, segment
, f
);
199 arena
->m_freelist_bitmap
|= head
->m_size
;
203 @precond arena->m_lock acquired.
205 void rtl_arena_freelist_remove(
206 rtl_arena_type
* arena
,
207 rtl_arena_segment_type
* segment
210 if (segment
->m_fnext
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
&&
211 segment
->m_fprev
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
)
213 rtl_arena_segment_type
* head
;
215 head
= segment
->m_fprev
;
216 assert(arena
->m_freelist_bitmap
& head
->m_size
);
217 arena
->m_freelist_bitmap
^= head
->m_size
;
219 QUEUE_REMOVE_NAMED(segment
, f
);
/* Hash an address into the arena's table: fold shifted copies of the
   address together, strip the quantum alignment bits, mask to table size. */
#define RTL_ARENA_HASH_INDEX_IMPL(a, s, q, m) \
    ((((a) + ((a) >> (s)) + ((a) >> ((s) << 1))) >> (q)) & (m))

#define RTL_ARENA_HASH_INDEX(arena, addr) \
    RTL_ARENA_HASH_INDEX_IMPL((addr), (arena)->m_hash_shift, (arena)->m_quantum_shift, ((arena)->m_hash_size - 1))
229 @precond arena->m_lock released.
231 void rtl_arena_hash_rescale(
232 rtl_arena_type
* arena
,
236 assert(new_size
!= 0);
238 rtl_arena_segment_type
** new_table
;
241 new_bytes
= new_size
* sizeof(rtl_arena_segment_type
*);
242 new_table
= static_cast<rtl_arena_segment_type
**>(rtl_arena_alloc (gp_arena_arena
, &new_bytes
));
246 rtl_arena_segment_type
** old_table
;
247 sal_Size old_size
, i
;
249 memset (new_table
, 0, new_bytes
);
251 RTL_MEMORY_LOCK_ACQUIRE(&(arena
->m_lock
));
253 old_table
= arena
->m_hash_table
;
254 old_size
= arena
->m_hash_size
;
256 arena
->m_hash_table
= new_table
;
257 arena
->m_hash_size
= new_size
;
258 arena
->m_hash_shift
= highbit(arena
->m_hash_size
) - 1;
260 for (i
= 0; i
< old_size
; i
++)
262 rtl_arena_segment_type
* curr
= old_table
[i
];
265 rtl_arena_segment_type
* next
= curr
->m_fnext
;
266 rtl_arena_segment_type
** head
;
268 // coverity[negative_shift] - bogus
269 head
= &(arena
->m_hash_table
[RTL_ARENA_HASH_INDEX(arena
, curr
->m_addr
)]);
270 curr
->m_fnext
= (*head
);
275 old_table
[i
] = nullptr;
278 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
280 if (old_table
!= arena
->m_hash_table_0
)
282 sal_Size old_bytes
= old_size
* sizeof(rtl_arena_segment_type
*);
283 rtl_arena_free (gp_arena_arena
, old_table
, old_bytes
);
289 Insert arena hash, and update stats.
291 void rtl_arena_hash_insert(
292 rtl_arena_type
* arena
,
293 rtl_arena_segment_type
* segment
296 rtl_arena_segment_type
** ppSegment
;
298 ppSegment
= &(arena
->m_hash_table
[RTL_ARENA_HASH_INDEX(arena
, segment
->m_addr
)]);
300 segment
->m_fnext
= (*ppSegment
);
301 (*ppSegment
) = segment
;
303 arena
->m_stats
.m_alloc
+= 1;
304 arena
->m_stats
.m_mem_alloc
+= segment
->m_size
;
308 Remove arena hash, and update stats.
310 rtl_arena_segment_type
* rtl_arena_hash_remove(
311 rtl_arena_type
* arena
,
316 rtl_arena_segment_type
*segment
, **segpp
;
317 sal_Size lookups
= 0;
319 segpp
= &(arena
->m_hash_table
[RTL_ARENA_HASH_INDEX(arena
, addr
)]);
320 while ((segment
= *segpp
))
322 if (segment
->m_addr
== addr
)
324 *segpp
= segment
->m_fnext
;
325 segment
->m_fnext
= segment
->m_fprev
= segment
;
329 /* update lookup miss stats */
331 segpp
= &(segment
->m_fnext
);
334 assert(segment
); // bad free
337 assert(segment
->m_size
== size
);
338 (void) size
; // avoid warnings
340 arena
->m_stats
.m_free
+= 1;
341 arena
->m_stats
.m_mem_alloc
-= segment
->m_size
;
345 sal_Size nseg
= static_cast<sal_Size
>(arena
->m_stats
.m_alloc
- arena
->m_stats
.m_free
);
346 if (nseg
> 4 * arena
->m_hash_size
)
348 if (!(arena
->m_flags
& RTL_ARENA_FLAG_RESCALE
))
350 sal_Size ave
= nseg
>> arena
->m_hash_shift
;
352 sal_Size new_size
= arena
->m_hash_size
<< (highbit(ave
) - 1);
354 arena
->m_flags
|= RTL_ARENA_FLAG_RESCALE
;
355 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
356 rtl_arena_hash_rescale (arena
, new_size
);
357 RTL_MEMORY_LOCK_ACQUIRE(&(arena
->m_lock
));
358 arena
->m_flags
&= ~RTL_ARENA_FLAG_RESCALE
;
368 allocate (and remove) segment from freelist
370 @precond arena->m_lock acquired
371 @precond (*ppSegment == 0)
373 bool rtl_arena_segment_alloc(
374 rtl_arena_type
* arena
,
376 rtl_arena_segment_type
** ppSegment
382 if (!RTL_MEMORY_ISP2(size
))
384 unsigned int msb
= highbit(size
);
385 if (RTL_ARENA_FREELIST_SIZE
== msb
)
387 /* highest possible freelist: fall back to first fit */
388 rtl_arena_segment_type
*head
, *segment
;
390 head
= &(arena
->m_freelist_head
[msb
- 1]);
391 for (segment
= head
->m_fnext
; segment
!= head
; segment
= segment
->m_fnext
)
393 if (segment
->m_size
>= size
)
395 /* allocate first fit segment */
396 (*ppSegment
) = segment
;
400 goto dequeue_and_leave
;
403 /* roundup to next power of 2 */
404 size
= ((sal_Size(1)) << msb
);
407 index
= lowbit(RTL_MEMORY_P2ALIGN(arena
->m_freelist_bitmap
, size
));
410 /* instant fit: allocate first free segment */
411 rtl_arena_segment_type
*head
;
413 head
= &(arena
->m_freelist_head
[index
- 1]);
414 (*ppSegment
) = head
->m_fnext
;
415 assert((*ppSegment
) != head
);
421 /* remove from freelist */
422 rtl_arena_freelist_remove (arena
, (*ppSegment
));
424 return (*ppSegment
!= nullptr);
428 import new (span) segment from source arena
430 @precond arena->m_lock acquired
431 @precond (*ppSegment == 0)
433 bool rtl_arena_segment_create(
434 rtl_arena_type
* arena
,
436 rtl_arena_segment_type
** ppSegment
440 if (arena
->m_source_alloc
)
442 rtl_arena_segment_get (arena
, ppSegment
);
445 rtl_arena_segment_type
* span
= nullptr;
446 rtl_arena_segment_get (arena
, &span
);
449 /* import new span from source arena */
450 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
453 span
->m_addr
= reinterpret_cast<sal_uIntPtr
>(
454 (arena
->m_source_alloc
)(
455 arena
->m_source_arena
, &(span
->m_size
)));
457 RTL_MEMORY_LOCK_ACQUIRE(&(arena
->m_lock
));
458 if (span
->m_addr
!= 0)
460 /* insert onto segment list, update stats */
461 span
->m_type
= RTL_ARENA_SEGMENT_TYPE_SPAN
;
462 QUEUE_INSERT_HEAD_NAMED(&(arena
->m_segment_head
), span
, s
);
463 arena
->m_stats
.m_mem_total
+= span
->m_size
;
465 (*ppSegment
)->m_addr
= span
->m_addr
;
466 (*ppSegment
)->m_size
= span
->m_size
;
467 (*ppSegment
)->m_type
= RTL_ARENA_SEGMENT_TYPE_FREE
;
468 QUEUE_INSERT_HEAD_NAMED(span
, (*ppSegment
), s
);
473 rtl_arena_segment_put (arena
, &span
);
475 rtl_arena_segment_put (arena
, ppSegment
);
478 return false; // failure
482 mark as free and join with adjacent free segment(s)
484 @precond arena->m_lock acquired
485 @precond segment marked 'used'
487 void rtl_arena_segment_coalesce(
488 rtl_arena_type
* arena
,
489 rtl_arena_segment_type
* segment
492 rtl_arena_segment_type
*next
, *prev
;
494 /* mark segment free */
495 assert(segment
->m_type
== RTL_ARENA_SEGMENT_TYPE_USED
);
496 segment
->m_type
= RTL_ARENA_SEGMENT_TYPE_FREE
;
498 /* try to merge w/ next segment */
499 next
= segment
->m_snext
;
500 if (next
->m_type
== RTL_ARENA_SEGMENT_TYPE_FREE
)
502 assert(segment
->m_addr
+ segment
->m_size
== next
->m_addr
);
503 segment
->m_size
+= next
->m_size
;
505 /* remove from freelist */
506 rtl_arena_freelist_remove (arena
, next
);
508 /* remove from segment list */
509 QUEUE_REMOVE_NAMED(next
, s
);
511 /* release segment descriptor */
512 rtl_arena_segment_put (arena
, &next
);
515 /* try to merge w/ prev segment */
516 prev
= segment
->m_sprev
;
517 if (prev
->m_type
== RTL_ARENA_SEGMENT_TYPE_FREE
)
519 assert(prev
->m_addr
+ prev
->m_size
== segment
->m_addr
);
520 segment
->m_addr
= prev
->m_addr
;
521 segment
->m_size
+= prev
->m_size
;
523 /* remove from freelist */
524 rtl_arena_freelist_remove (arena
, prev
);
526 /* remove from segment list */
527 QUEUE_REMOVE_NAMED(prev
, s
);
529 /* release segment descriptor */
530 rtl_arena_segment_put (arena
, &prev
);
534 void rtl_arena_constructor(void * obj
)
536 rtl_arena_type
* arena
= static_cast<rtl_arena_type
*>(obj
);
537 rtl_arena_segment_type
* head
;
540 memset (arena
, 0, sizeof(rtl_arena_type
));
542 QUEUE_START_NAMED(arena
, arena_
);
544 RTL_MEMORY_LOCK_INIT(&(arena
->m_lock
));
546 head
= &(arena
->m_segment_reserve_span_head
);
547 rtl_arena_segment_constructor (head
);
548 head
->m_type
= RTL_ARENA_SEGMENT_TYPE_HEAD
;
550 head
= &(arena
->m_segment_reserve_head
);
551 rtl_arena_segment_constructor (head
);
552 head
->m_type
= RTL_ARENA_SEGMENT_TYPE_HEAD
;
554 head
= &(arena
->m_segment_head
);
555 rtl_arena_segment_constructor (head
);
556 head
->m_type
= RTL_ARENA_SEGMENT_TYPE_HEAD
;
558 for (i
= 0; i
< RTL_ARENA_FREELIST_SIZE
; i
++)
560 head
= &(arena
->m_freelist_head
[i
]);
561 rtl_arena_segment_constructor (head
);
563 head
->m_size
= ((sal_Size(1)) << i
);
564 head
->m_type
= RTL_ARENA_SEGMENT_TYPE_HEAD
;
567 arena
->m_hash_table
= arena
->m_hash_table_0
;
568 arena
->m_hash_size
= RTL_ARENA_HASH_SIZE
;
569 arena
->m_hash_shift
= highbit(arena
->m_hash_size
) - 1;
572 void rtl_arena_destructor(void * obj
)
574 rtl_arena_type
* arena
= static_cast<rtl_arena_type
*>(obj
);
575 rtl_arena_segment_type
* head
;
578 assert(QUEUE_STARTED_NAMED(arena
, arena_
));
580 RTL_MEMORY_LOCK_DESTROY(&(arena
->m_lock
));
582 head
= &(arena
->m_segment_reserve_span_head
);
583 assert(head
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
);
584 rtl_arena_segment_destructor (head
);
586 head
= &(arena
->m_segment_reserve_head
);
587 assert(head
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
);
588 rtl_arena_segment_destructor (head
);
590 head
= &(arena
->m_segment_head
);
591 assert(head
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
);
592 rtl_arena_segment_destructor (head
);
594 for (i
= 0; i
< RTL_ARENA_FREELIST_SIZE
; i
++)
596 head
= &(arena
->m_freelist_head
[i
]);
598 assert(head
->m_size
== ((sal_Size(1)) << i
));
599 assert(head
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
);
601 rtl_arena_segment_destructor (head
);
604 assert(arena
->m_hash_table
== arena
->m_hash_table_0
);
605 assert(arena
->m_hash_size
== RTL_ARENA_HASH_SIZE
);
606 assert(arena
->m_hash_shift
== highbit(arena
->m_hash_size
) - 1);
609 rtl_arena_type
* rtl_arena_activate(
610 rtl_arena_type
* arena
,
613 rtl_arena_type
* source_arena
,
614 void * (SAL_CALL
* source_alloc
)(rtl_arena_type
*, sal_Size
*),
615 void (SAL_CALL
* source_free
) (rtl_arena_type
*, void *, sal_Size
)
621 (void) snprintf (arena
->m_name
, sizeof(arena
->m_name
), "%s", name
);
623 if (!RTL_MEMORY_ISP2(quantum
))
625 /* roundup to next power of 2 */
626 quantum
= ((sal_Size(1)) << highbit(quantum
));
629 arena
->m_quantum
= quantum
;
630 arena
->m_quantum_shift
= highbit(arena
->m_quantum
) - 1;
632 arena
->m_source_arena
= source_arena
;
633 arena
->m_source_alloc
= source_alloc
;
634 arena
->m_source_free
= source_free
;
636 /* insert into arena list */
637 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list
.m_lock
));
638 QUEUE_INSERT_TAIL_NAMED(&(g_arena_list
.m_arena_head
), arena
, arena_
);
639 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list
.m_lock
));
644 void rtl_arena_deactivate(rtl_arena_type
* arena
)
646 rtl_arena_segment_type
* head
, * segment
;
648 /* remove from arena list */
649 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list
.m_lock
));
650 QUEUE_REMOVE_NAMED(arena
, arena_
);
651 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list
.m_lock
));
653 /* check for leaked segments */
654 if (arena
->m_stats
.m_alloc
> arena
->m_stats
.m_free
)
658 /* cleanup still used segment(s) */
659 for (i
= 0, n
= arena
->m_hash_size
; i
< n
; i
++)
661 while ((segment
= arena
->m_hash_table
[i
]))
663 /* pop from hash table */
664 arena
->m_hash_table
[i
] = segment
->m_fnext
;
665 segment
->m_fnext
= segment
->m_fprev
= segment
;
667 /* coalesce w/ adjacent free segment(s) */
668 rtl_arena_segment_coalesce (arena
, segment
);
670 /* insert onto freelist */
671 rtl_arena_freelist_insert (arena
, segment
);
676 /* cleanup hash table */
677 if (arena
->m_hash_table
!= arena
->m_hash_table_0
)
682 arena
->m_hash_size
* sizeof(rtl_arena_segment_type
*));
684 arena
->m_hash_table
= arena
->m_hash_table_0
;
685 arena
->m_hash_size
= RTL_ARENA_HASH_SIZE
;
686 arena
->m_hash_shift
= highbit(arena
->m_hash_size
) - 1;
689 /* cleanup segment list */
690 head
= &(arena
->m_segment_head
);
691 for (segment
= head
->m_snext
; segment
!= head
; segment
= head
->m_snext
)
693 if (segment
->m_type
== RTL_ARENA_SEGMENT_TYPE_FREE
)
695 /* remove from freelist */
696 rtl_arena_freelist_remove (arena
, segment
);
700 /* can have only free and span segments here */
701 assert(segment
->m_type
== RTL_ARENA_SEGMENT_TYPE_SPAN
);
704 /* remove from segment list */
705 QUEUE_REMOVE_NAMED(segment
, s
);
707 /* release segment descriptor */
708 rtl_arena_segment_put (arena
, &segment
);
711 /* cleanup segment reserve list */
712 head
= &(arena
->m_segment_reserve_head
);
713 for (segment
= head
->m_snext
; segment
!= head
; segment
= head
->m_snext
)
715 /* remove from segment list */
716 QUEUE_REMOVE_NAMED(segment
, s
);
719 /* cleanup segment reserve span(s) */
720 head
= &(arena
->m_segment_reserve_span_head
);
721 for (segment
= head
->m_snext
; segment
!= head
; segment
= head
->m_snext
)
723 /* can have only span segments here */
724 assert(segment
->m_type
== RTL_ARENA_SEGMENT_TYPE_SPAN
);
726 /* remove from segment list */
727 QUEUE_REMOVE_NAMED(segment
, s
);
729 /* return span to g_machdep_arena */
730 rtl_machdep_free (gp_machdep_arena
, reinterpret_cast<void*>(segment
->m_addr
), segment
->m_size
);
736 rtl_arena_type
* SAL_CALL
rtl_arena_create(
740 rtl_arena_type
* source_arena
,
741 void * (SAL_CALL
* source_alloc
)(rtl_arena_type
*, sal_Size
*),
742 void (SAL_CALL
* source_free
) (rtl_arena_type
*, void *, sal_Size
),
743 SAL_UNUSED_PARAMETER
int
746 rtl_arena_type
* result
= nullptr;
747 sal_Size size
= sizeof(rtl_arena_type
);
750 result
= static_cast<rtl_arena_type
*>(rtl_arena_alloc (gp_arena_arena
, &size
));
753 rtl_arena_type
* arena
= result
;
754 rtl_arena_constructor (arena
);
758 assert(gp_default_arena
);
759 source_arena
= gp_default_arena
;
762 result
= rtl_arena_activate (
773 rtl_arena_deactivate (arena
);
774 rtl_arena_destructor (arena
);
775 rtl_arena_free (gp_arena_arena
, arena
, size
);
778 else if (!gp_arena_arena
)
780 ensureArenaSingleton();
790 void SAL_CALL
rtl_arena_destroy(rtl_arena_type
* arena
) noexcept
794 rtl_arena_deactivate (arena
);
795 rtl_arena_destructor (arena
);
796 rtl_arena_free (gp_arena_arena
, arena
, sizeof(rtl_arena_type
));
800 void * SAL_CALL
rtl_arena_alloc(
801 rtl_arena_type
* arena
,
805 void * addr
= nullptr;
811 size
= RTL_MEMORY_ALIGN(*pSize
, arena
->m_quantum
);
814 /* allocate from segment list */
815 rtl_arena_segment_type
*segment
= nullptr;
817 RTL_MEMORY_LOCK_ACQUIRE(&(arena
->m_lock
));
818 if (rtl_arena_segment_alloc (arena
, size
, &segment
) ||
819 rtl_arena_segment_create(arena
, size
, &segment
) )
824 /* mark segment used */
825 assert(segment
->m_type
== RTL_ARENA_SEGMENT_TYPE_FREE
);
826 segment
->m_type
= RTL_ARENA_SEGMENT_TYPE_USED
;
829 assert(segment
->m_size
>= size
);
830 oversize
= segment
->m_size
- size
;
831 if (oversize
>= arena
->m_quantum
)
833 rtl_arena_segment_type
* remainder
= nullptr;
834 rtl_arena_segment_get (arena
, &remainder
);
837 segment
->m_size
= size
;
839 remainder
->m_addr
= segment
->m_addr
+ segment
->m_size
;
840 remainder
->m_size
= oversize
;
841 remainder
->m_type
= RTL_ARENA_SEGMENT_TYPE_FREE
;
842 QUEUE_INSERT_HEAD_NAMED(segment
, remainder
, s
);
844 rtl_arena_freelist_insert (arena
, remainder
);
848 rtl_arena_hash_insert (arena
, segment
);
850 (*pSize
) = segment
->m_size
;
851 addr
= reinterpret_cast<void*>(segment
->m_addr
);
853 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
859 void SAL_CALL
rtl_arena_free (
860 rtl_arena_type
* arena
,
867 size
= RTL_MEMORY_ALIGN(size
, arena
->m_quantum
);
870 /* free to segment list */
871 rtl_arena_segment_type
* segment
;
873 RTL_MEMORY_LOCK_ACQUIRE(&(arena
->m_lock
));
875 segment
= rtl_arena_hash_remove (arena
, reinterpret_cast<sal_uIntPtr
>(addr
), size
);
878 rtl_arena_segment_type
*next
, *prev
;
880 /* coalesce w/ adjacent free segment(s) */
881 rtl_arena_segment_coalesce (arena
, segment
);
883 /* determine (new) next and prev segment */
884 next
= segment
->m_snext
;
885 prev
= segment
->m_sprev
;
887 /* entire span free when prev is a span, and next is either a span or a list head */
888 if (prev
->m_type
== RTL_ARENA_SEGMENT_TYPE_SPAN
&&
889 ((next
->m_type
== RTL_ARENA_SEGMENT_TYPE_SPAN
) ||
890 (next
->m_type
== RTL_ARENA_SEGMENT_TYPE_HEAD
)))
893 prev
->m_addr
== segment
->m_addr
894 && prev
->m_size
== segment
->m_size
);
896 if (arena
->m_source_free
)
898 addr
= reinterpret_cast<void*>(prev
->m_addr
);
901 /* remove from segment list */
902 QUEUE_REMOVE_NAMED(segment
, s
);
904 /* release segment descriptor */
905 rtl_arena_segment_put (arena
, &segment
);
907 /* remove from segment list */
908 QUEUE_REMOVE_NAMED(prev
, s
);
910 /* release (span) segment descriptor */
911 rtl_arena_segment_put (arena
, &prev
);
913 /* update stats, return span to source arena */
914 arena
->m_stats
.m_mem_total
-= size
;
915 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
917 (arena
->m_source_free
)(arena
->m_source_arena
, addr
, size
);
922 /* insert onto freelist */
923 rtl_arena_freelist_insert (arena
, segment
);
926 RTL_MEMORY_LOCK_RELEASE(&(arena
->m_lock
));
931 void rtl_arena_foreach (rtl_arena_type
*arena
, ArenaForeachFn foreachFn
)
934 for (int i
= 0, n
= arena
->m_hash_size
; i
< n
; i
++)
936 for (rtl_arena_segment_type
*segment
= arena
->m_hash_table
[i
];
937 segment
!= nullptr; segment
= segment
->m_fnext
)
939 foreachFn(reinterpret_cast<void *>(segment
->m_addr
),
/* Platform backing store: mmap on Unix; on Windows VirtualAlloc is used
   and MAP_FAILED is defined to match its nullptr failure value. */
#if defined(SAL_UNX)
#include <sys/mman.h>
#elif defined(_WIN32)
#define MAP_FAILED nullptr
#endif /* SAL_UNX || _WIN32 */
954 void * rtl_machdep_alloc(
955 rtl_arena_type
* pArena
,
960 sal_Size size
= *pSize
;
962 assert(pArena
== gp_machdep_arena
);
964 #if defined(__sun) && defined(SPARC)
965 /* see @ mmap(2) man pages */
966 size
+= (pArena
->m_quantum
+ pArena
->m_quantum
); /* "red-zone" pages */
967 if (size
> (4 << 20))
968 size
= RTL_MEMORY_P2ROUNDUP(size
, (4 << 20));
969 else if (size
> (512 << 10))
970 size
= RTL_MEMORY_P2ROUNDUP(size
, (512 << 10));
972 size
= RTL_MEMORY_P2ROUNDUP(size
, (64 << 10));
973 size
-= (pArena
->m_quantum
+ pArena
->m_quantum
); /* "red-zone" pages */
975 /* default allocation granularity */
976 if (pArena
->m_quantum
< (64 << 10))
978 size
= RTL_MEMORY_P2ROUNDUP(size
, (64 << 10));
982 size
= RTL_MEMORY_P2ROUNDUP(size
, pArena
->m_quantum
);
987 addr
= mmap (nullptr, static_cast<size_t>(size
), PROT_READ
| PROT_WRITE
, MAP_PRIVATE
| MAP_ANON
, -1, 0);
988 #elif defined(_WIN32)
989 addr
= VirtualAlloc (nullptr, static_cast<SIZE_T
>(size
), MEM_COMMIT
, PAGE_READWRITE
);
990 #endif /* (SAL_UNX || _WIN32) */
992 if (addr
!= MAP_FAILED
)
994 pArena
->m_stats
.m_alloc
+= 1;
995 pArena
->m_stats
.m_mem_total
+= size
;
996 pArena
->m_stats
.m_mem_alloc
+= size
;
1004 void rtl_machdep_free(
1005 rtl_arena_type
* pArena
,
1010 assert(pArena
== gp_machdep_arena
);
1012 pArena
->m_stats
.m_free
+= 1;
1013 pArena
->m_stats
.m_mem_total
-= nSize
;
1014 pArena
->m_stats
.m_mem_alloc
-= nSize
;
1016 #if defined(SAL_UNX)
1017 (void) munmap(pAddr
, nSize
);
1018 #elif defined(_WIN32)
1019 (void) VirtualFree (pAddr
, SIZE_T(0), MEM_RELEASE
);
1020 #endif /* (SAL_UNX || _WIN32) */
1023 sal_Size
rtl_machdep_pagesize()
1025 #if defined(SAL_UNX)
1026 #if defined(FREEBSD) || defined(NETBSD) || defined(DRAGONFLY)
1027 return (sal_Size
)getpagesize();
1029 return static_cast<sal_Size
>(sysconf(_SC_PAGESIZE
));
1030 #endif /* xBSD || POSIX */
1031 #elif defined(_WIN32)
1033 GetSystemInfo (&info
);
1034 return static_cast<sal_Size
>(info
.dwPageSize
);
1035 #endif /* (SAL_UNX || _WIN32) */
1040 void rtl_arena_init()
1043 /* list of arenas */
1044 RTL_MEMORY_LOCK_INIT(&(g_arena_list
.m_lock
));
1045 rtl_arena_constructor (&(g_arena_list
.m_arena_head
));
1048 /* machdep (pseudo) arena */
1049 static rtl_arena_type g_machdep_arena
;
1051 assert(!gp_machdep_arena
);
1052 rtl_arena_constructor (&g_machdep_arena
);
1054 gp_machdep_arena
= rtl_arena_activate (
1056 "rtl_machdep_arena",
1057 rtl_machdep_pagesize(),
1058 nullptr, nullptr, nullptr /* no source */
1060 assert(gp_machdep_arena
);
1064 static rtl_arena_type g_default_arena
;
1066 assert(!gp_default_arena
);
1067 rtl_arena_constructor (&g_default_arena
);
1069 gp_default_arena
= rtl_arena_activate (
1071 "rtl_default_arena",
1072 rtl_machdep_pagesize(),
1073 gp_machdep_arena
, /* source */
1077 assert(gp_default_arena
);
1080 /* arena internal arena */
1081 static rtl_arena_type g_arena_arena
;
1083 assert(!gp_arena_arena
);
1084 rtl_arena_constructor (&g_arena_arena
);
1086 gp_arena_arena
= rtl_arena_activate(
1088 "rtl_arena_internal_arena",
1090 gp_default_arena
, /* source */
1094 assert(gp_arena_arena
);
1098 void rtl_arena_fini()
1102 rtl_arena_type
* arena
, * head
;
1104 RTL_MEMORY_LOCK_ACQUIRE(&(g_arena_list
.m_lock
));
1105 head
= &(g_arena_list
.m_arena_head
);
1107 for (arena
= head
->m_arena_next
; arena
!= head
; arena
= arena
->m_arena_next
)
1111 RTL_MEMORY_LOCK_RELEASE(&(g_arena_list
.m_lock
));
1115 /* vim:set shiftwidth=4 softtabstop=4 expandtab: */