/***********************************************************************/
/*                                                                     */
/*            Damien Doligez, projet Para, INRIA Rocquencourt          */
/*                                                                     */
/*  Copyright 1996 Institut National de Recherche en Informatique et   */
/*  en Automatique.  All rights reserved.  This file is distributed    */
/*  under the terms of the GNU Library General Public License, with    */
/*  the special exception on linking described in file ../LICENSE.     */
/*                                                                     */
/***********************************************************************/
29 extern uintnat caml_percent_free
; /* major_gc.c */
30 extern void caml_shrink_heap (char *); /* memory.c */
/* Encoded headers: the color is stored in the 2 least significant bits.
   (For pointer inversion, we need to distinguish headers from pointers.)
   s is a Wosize, t is a tag, and c is a color (a two-bit number)

   For the purpose of compaction, "colors" are:
   0: pointers (direct or inverted)
   1: integer or (unencoded) infix header
   2: inverted pointer for infix header
   3: integer or encoded (noninfix) header

  XXX The above assumes that all roots are aligned on a 4-byte boundary,
  XXX which is not always guaranteed by C.
  XXX (see [caml_register_global_roots] and [caml_init_exceptions])
  XXX Should be able to fix it to only assume 2-byte alignment.
*/

/* Pack size [s], tag [t] (8 bits) and color [c] (2 bits) into one word. */
#define Make_ehd(s,t,c) (((s) << 10) | (t) << 2 | (c))
#define Whsize_ehd(h) Whsize_hd (h)
#define Wosize_ehd(h) Wosize_hd (h)
#define Tag_ehd(h) (((h) >> 2) & 0xFF)
/* Extract the two low "color" bits of any word. */
#define Ecolor(w) ((w) & 3)
56 static void invert_pointer_at (word
*p
)
59 Assert (Ecolor ((intnat
) p
) == 0);
61 /* Use Ecolor (q) == 0 instead of Is_block (q) because q could be an
62 inverted pointer for an infix header (with Ecolor == 2). */
63 if (Ecolor (q
) == 0 && Is_in_heap (q
)){
64 switch (Ecolor (Hd_val (q
))){
66 case 3: /* Pointer or header: insert in inverted list. */
68 Hd_val (q
) = (header_t
) p
;
70 case 1: /* Infix header: make inverted infix list. */
71 /* Double inversion: the last of the inverted infix list points to
72 the next infix header in this block. The last of the last list
73 contains the original block header. */
75 /* This block as a value. */
76 value val
= (value
) q
- Infix_offset_val (q
);
77 /* Get the block header. */
78 word
*hp
= (word
*) Hp_val (val
);
80 while (Ecolor (*hp
) == 0) hp
= (word
*) *hp
;
81 Assert (Ecolor (*hp
) == 3);
82 if (Tag_ehd (*hp
) == Closure_tag
){
83 /* This is the first infix found in this block. */
84 /* Save original header. */
86 /* Link inverted infix list. */
87 Hd_val (q
) = (header_t
) ((word
) p
| 2);
88 /* Change block header's tag to Infix_tag, and change its size
89 to point to the infix list. */
90 *hp
= Make_ehd (Wosize_bhsize (q
- val
), Infix_tag
, 3);
91 }else{ Assert (Tag_ehd (*hp
) == Infix_tag
);
92 /* Point the last of this infix list to the current first infix
94 *p
= (word
) &Field (val
, Wosize_ehd (*hp
)) | 1;
95 /* Point the head of this infix list to the above. */
96 Hd_val (q
) = (header_t
) ((word
) p
| 2);
97 /* Change block header's size to point to this infix list. */
98 *hp
= Make_ehd (Wosize_bhsize (q
- val
), Infix_tag
, 3);
102 case 2: /* Inverted infix list: insert. */
104 Hd_val (q
) = (header_t
) ((word
) p
| 2);
110 static void invert_root (value v
, value
*p
)
112 invert_pointer_at ((word
*) p
);
/* Cursor for the bump allocator used by passes 3 and 4: first heap
   chunk that may still have free room. */
static char *compact_fl;
117 static void init_compact_allocate (void)
119 char *ch
= caml_heap_start
;
121 Chunk_alloc (ch
) = 0;
122 ch
= Chunk_next (ch
);
124 compact_fl
= caml_heap_start
;
127 static char *compact_allocate (mlsize_t size
)
128 /* in bytes, including header */
132 while (Chunk_size (compact_fl
) - Chunk_alloc (compact_fl
) <= Bhsize_wosize (3)
133 && Chunk_size (Chunk_next (compact_fl
))
134 - Chunk_alloc (Chunk_next (compact_fl
))
135 <= Bhsize_wosize (3)){
136 compact_fl
= Chunk_next (compact_fl
);
139 while (Chunk_size (chunk
) - Chunk_alloc (chunk
) < size
){
140 chunk
= Chunk_next (chunk
); Assert (chunk
!= NULL
);
142 adr
= chunk
+ Chunk_alloc (chunk
);
143 Chunk_alloc (chunk
) += size
;
147 void caml_compact_heap (void)
150 Assert (caml_gc_phase
== Phase_idle
);
151 caml_gc_message (0x10, "Compacting heap...\n", 0);
157 /* First pass: encode all noninfix headers. */
159 ch
= caml_heap_start
;
161 header_t
*p
= (header_t
*) ch
;
163 chend
= ch
+ Chunk_size (ch
);
164 while ((char *) p
< chend
){
165 header_t hd
= Hd_hp (p
);
166 mlsize_t sz
= Wosize_hd (hd
);
168 if (Is_blue_hd (hd
)){
169 /* Free object. Give it a string tag. */
170 Hd_hp (p
) = Make_ehd (sz
, String_tag
, 3);
171 }else{ Assert (Is_white_hd (hd
));
172 /* Live object. Keep its tag. */
173 Hd_hp (p
) = Make_ehd (sz
, Tag_hd (hd
), 3);
175 p
+= Whsize_wosize (sz
);
177 ch
= Chunk_next (ch
);
182 /* Second pass: invert pointers.
183 Link infix headers in each block in an inverted list of inverted lists.
184 Don't forget roots and weak pointers. */
186 /* Invert roots first because the threads library needs some heap
187 data structures to find its roots. Fortunately, it doesn't need
188 the headers (see above). */
189 caml_do_roots (invert_root
);
190 caml_final_do_weak_roots (invert_root
);
192 ch
= caml_heap_start
;
194 word
*p
= (word
*) ch
;
195 chend
= ch
+ Chunk_size (ch
);
197 while ((char *) p
< chend
){
203 while (Ecolor (q
) == 0) q
= * (word
*) q
;
208 /* Get the original header of this block. */
211 while (Ecolor (q
) != 3) q
= * (word
*) (q
& ~(uintnat
)3);
216 if (t
< No_scan_tag
){
217 for (i
= 1; i
< sz
; i
++) invert_pointer_at (&(p
[i
]));
221 ch
= Chunk_next (ch
);
223 /* Invert weak pointers. */
225 value
*pp
= &caml_weak_list_head
;
232 if (p
== (value
) NULL
) break;
234 while (Ecolor (q
) == 0) q
= * (word
*) q
;
236 for (i
= 1; i
< sz
; i
++){
237 if (Field (p
,i
) != caml_weak_none
){
238 invert_pointer_at ((word
*) &(Field (p
,i
)));
241 invert_pointer_at ((word
*) pp
);
248 /* Third pass: reallocate virtually; revert pointers; decode headers.
249 Rebuild infix headers. */
251 init_compact_allocate ();
252 ch
= caml_heap_start
;
254 word
*p
= (word
*) ch
;
256 chend
= ch
+ Chunk_size (ch
);
257 while ((char *) p
< chend
){
260 if (Ecolor (q
) == 0 || Tag_ehd (q
) == Infix_tag
){
261 /* There were (normal or infix) pointers to this block. */
265 word
*infixes
= NULL
;
267 while (Ecolor (q
) == 0) q
= * (word
*) q
;
272 /* Get the original header of this block. */
274 q
= *infixes
; Assert (Ecolor (q
) == 2);
275 while (Ecolor (q
) != 3) q
= * (word
*) (q
& ~(uintnat
)3);
280 newadr
= compact_allocate (Bsize_wsize (sz
));
282 while (Ecolor (q
) == 0){
283 word next
= * (word
*) q
;
284 * (word
*) q
= (word
) Val_hp (newadr
);
287 *p
= Make_header (Wosize_whsize (sz
), t
, Caml_white
);
289 if (infixes
!= NULL
){
290 /* Rebuild the infix headers and revert the infix pointers. */
291 while (Ecolor ((word
) infixes
) != 3){
292 infixes
= (word
*) ((word
) infixes
& ~(uintnat
) 3);
294 while (Ecolor (q
) == 2){
296 q
= (word
) q
& ~(uintnat
) 3;
298 * (word
*) q
= (word
) Val_hp ((word
*) newadr
+ (infixes
- p
));
300 } Assert (Ecolor (q
) == 1 || Ecolor (q
) == 3);
301 *infixes
= Make_header (infixes
- p
, Infix_tag
, Caml_white
);
302 infixes
= (word
*) q
;
306 }else{ Assert (Ecolor (q
) == 3);
307 /* This is guaranteed only if caml_compact_heap was called after a
308 nonincremental major GC: Assert (Tag_ehd (q) == String_tag);
310 /* No pointers to the header and no infix header:
311 the object was free. */
312 *p
= Make_header (Wosize_ehd (q
), Tag_ehd (q
), Caml_blue
);
316 ch
= Chunk_next (ch
);
321 /* Fourth pass: reallocate and move objects.
322 Use the exact same allocation algorithm as pass 3. */
324 init_compact_allocate ();
325 ch
= caml_heap_start
;
327 word
*p
= (word
*) ch
;
329 chend
= ch
+ Chunk_size (ch
);
330 while ((char *) p
< chend
){
332 if (Color_hd (q
) == Caml_white
){
333 size_t sz
= Bhsize_hd (q
);
334 char *newadr
= compact_allocate (sz
); Assert (newadr
<= (char *)p
);
335 memmove (newadr
, p
, sz
);
336 p
+= Wsize_bsize (sz
);
338 Assert (Color_hd (q
) == Caml_blue
);
342 ch
= Chunk_next (ch
);
346 /* Shrink the heap if needed. */
348 /* Find the amount of live data and the unshrinkable free space. */
353 ch
= caml_heap_start
;
355 if (Chunk_alloc (ch
) != 0){
356 live
+= Wsize_bsize (Chunk_alloc (ch
));
357 free
+= Wsize_bsize (Chunk_size (ch
) - Chunk_alloc (ch
));
359 ch
= Chunk_next (ch
);
362 /* Add up the empty chunks until there are enough, then remove the
363 other empty chunks. */
364 wanted
= caml_percent_free
* (live
/ 100 + 1);
365 ch
= caml_heap_start
;
367 char *next_chunk
= Chunk_next (ch
); /* Chunk_next (ch) will be erased */
369 if (Chunk_alloc (ch
) == 0){
371 free
+= Wsize_bsize (Chunk_size (ch
));
373 caml_shrink_heap (ch
);
380 /* Rebuild the free list. */
382 ch
= caml_heap_start
;
385 if (Chunk_size (ch
) > Chunk_alloc (ch
)){
386 caml_make_free_blocks ((value
*) (ch
+ Chunk_alloc (ch
)),
387 Wsize_bsize (Chunk_size(ch
)-Chunk_alloc(ch
)), 1);
389 ch
= Chunk_next (ch
);
392 ++ caml_stat_compactions
;
393 caml_gc_message (0x10, "done.\n", 0);
396 uintnat caml_percent_max
; /* used in gc_ctrl.c */
398 void caml_compact_heap_maybe (void)
400 /* Estimated free words in the heap:
401 FW = fl_size_at_change + 3 * (caml_fl_cur_size
402 - caml_fl_size_at_phase_change)
403 FW = 3 * caml_fl_cur_size - 2 * caml_fl_size_at_phase_change
404 Estimated live words: LW = caml_stat_heap_size - FW
405 Estimated free percentage: FP = 100 * FW / LW
406 We compact the heap if FP > caml_percent_max
409 Assert (caml_gc_phase
== Phase_idle
);
410 if (caml_percent_max
>= 1000000) return;
411 if (caml_stat_major_collections
< 5 || caml_stat_heap_chunks
< 2) return;
413 fw
= 3.0 * caml_fl_cur_size
- 2.0 * caml_fl_size_at_phase_change
;
414 if (fw
< 0) fw
= caml_fl_cur_size
;
416 if (fw
>= Wsize_bsize (caml_stat_heap_size
)){
419 fp
= 100.0 * fw
/ (Wsize_bsize (caml_stat_heap_size
) - fw
);
420 if (fp
> 1000000.0) fp
= 1000000.0;
422 caml_gc_message (0x200, "FL size at phase change = %"
423 ARCH_INTNAT_PRINTF_FORMAT
"u\n",
424 (uintnat
) caml_fl_size_at_phase_change
);
425 caml_gc_message (0x200, "Estimated overhead = %"
426 ARCH_INTNAT_PRINTF_FORMAT
"u%%\n",
428 if (fp
>= caml_percent_max
){
429 caml_gc_message (0x200, "Automatic compaction triggered.\n", 0);
430 caml_finish_major_cycle ();
432 /* We just did a complete GC, so we can measure the overhead exactly. */
433 fw
= caml_fl_cur_size
;
434 fp
= 100.0 * fw
/ (Wsize_bsize (caml_stat_heap_size
) - fw
);
435 caml_gc_message (0x200, "Measured overhead: %"
436 ARCH_INTNAT_PRINTF_FORMAT
"u%%\n",
439 caml_compact_heap ();