// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003  Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */
#include <config.h>

#include <stdio.h>
#include <limits.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>
// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <private/gc_pmark.h>
#include <gc_gcj.h>

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

  // These aren't declared in any Boehm GC header.
  void GC_finalize_all (void);
  ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
};
#define MAYBE_MARK(Obj, Top, Limit, Source, Exit)  \
  Top=GC_MARK_AND_PUSH((GC_PTR)Obj, Top, Limit, (GC_PTR *)Source)
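
// Note: GC_MARK_AND_PUSH pushes Obj onto the mark stack only if it looks
// like a pointer into the collected heap, and returns the (possibly
// advanced) mark stack pointer; a null or non-heap value leaves Top
// unchanged.  The Exit label argument is unused by this definition of the
// macro.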
// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static ptr_t *array_free_list;

// Lock used to protect access to Boehm's GC_enable/GC_disable functions.
static _Jv_Mutex_t disable_gc_mutex;
// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobject obj = (jobject) addr;

  // FIXME: if env is 1, this object was allocated through the debug
  // interface, and addr points to the beginning of the debug header.
  // In that case, we should really add the size of the header to addr.

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;
# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);
  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class objects
      // as pointerfree memory, and then mark it more intelligently here.
      // We ensure that the ClassClass mark descriptor forces invocation of
      // this procedure.
      //
      // Correctness of this is subtle, but it looks OK to me for now.  For the
      // incremental collector, we need to make sure that the class object is
      // written whenever any of the subobjects are altered and may need
      // rescanning.  This may be tricky during construction, and this may not
      // be the right way to do this with incremental collection.
      // If we overflow the mark stack, we will rescan the class object, so we
      // should be OK.  The same applies if we redo the mark phase because
      // win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (ptr_t) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
      p = (ptr_t) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
      for (int i = 0; i < c->constants.size; ++i)
        {
          /* FIXME: We could make this more precise by using the tags -KKT */
          p = (ptr_t) c->constants.data[i].p;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
        }
#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          p = (ptr_t) c->constants.tags;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
          p = (ptr_t) c->constants.data;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
          p = (ptr_t) c->vtable;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
        }
#endif
      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (ptr_t) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
        return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
        {
          // Scan each method in the cases where `methods' really
          // points to a methods structure.
          for (int i = 0; i < c->method_count; ++i)
            {
              p = (ptr_t) c->methods[i].name;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm1label);
              p = (ptr_t) c->methods[i].signature;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm2label);
            }
        }

      // Mark all the fields.
      p = (ptr_t) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
      for (int i = 0; i < c->field_count; ++i)
        {
          _Jv_Field *field = &c->fields[i];

#ifndef COMPACT_FIELDS
          p = (ptr_t) field->name;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
#endif
          p = (ptr_t) field->type;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);

          // For the interpreter, we also need to mark the memory
          // containing static members.
          if ((field->flags & java::lang::reflect::Modifier::STATIC))
            {
              p = (ptr_t) field->u.addr;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);

              // Also, if the static member is a reference, mark the
              // value pointed to.  We check for isResolved since
              // marking can happen before memory is allocated for
              // static members.
              if (JvFieldIsRef (field) && field->isResolved())
                {
                  jobject val = *(jobject *) field->u.addr;
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              c, c8elabel);
                }
            }
        }
      p = (ptr_t) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
      p = (ptr_t) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
      for (int i = 0; i < c->interface_count; ++i)
        {
          p = (ptr_t) c->interfaces[i];
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
        }
      p = (ptr_t) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
      p = (ptr_t) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);
      p = (ptr_t) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cPlabel);
      p = (ptr_t) c->hack_signers;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cSlabel);
#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          _Jv_InterpClass *ic = (_Jv_InterpClass *) c;

          p = (ptr_t) ic->interpreted_methods;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);

          for (int i = 0; i < c->method_count; i++)
            {
              p = (ptr_t) ic->interpreted_methods[i];
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                          cFlabel);

              // Mark the direct-threaded code.
              if ((c->methods[i].accflags
                   & java::lang::reflect::Modifier::NATIVE) == 0)
                {
                  _Jv_InterpMethod *im
                    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
                  if (im)
                    {
                      p = (ptr_t) im->prepared;
                      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                                  cFlabel);
                    }
                }

              // The interpreter installs a heap-allocated trampoline
              // here, so we'll mark it.
              p = (ptr_t) c->methods[i].ncode;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm3label);
            }

          p = (ptr_t) ic->field_initializers;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);
        }
#endif
    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              obj, elabel);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass();
        }
    }

  return mark_stack_ptr;
}
// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void * /*env*/)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (ptr_t) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
    }

  return mark_stack_ptr;
}
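
// Unlike _Jv_MarkObj, which GC_init_gcj_malloc installs as the gcj mark
// procedure, _Jv_MarkArray is only reached through the separate object
// `kind' that _Jv_InitGC registers for object arrays below.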
// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious assumption,
// since another one could be registered first.  But the compiler also
// knows this, so in that case everything else will break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
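
// GC_MAKE_PROC produces a descriptor whose low bits are GC_DS_PROC, which
// tells the collector to mark objects of this type by invoking the
// registered mark procedure (_Jv_MarkObj, installed in _Jv_InitGC below)
// instead of interpreting a per-class bitmap.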
void *
_Jv_BuildGCDescr(jclass self)
{
  jlong desc = 0;
  jint bits_per_word = CHAR_BIT * sizeof (void *);

  // Note: for now we only consider a bitmap mark descriptor.  We
  // could also handle the case where the first N fields of a type are
  // references.  However, this is not very likely to be used by many
  // classes, and it is easier to compute things this way.

  // The vtable pointer.
  desc |= 1ULL << (bits_per_word - 1);
#ifndef JV_HASH_SYNCHRONIZATION
  // The sync_info field.
  desc |= 1ULL << (bits_per_word - 2);
#endif

  for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
    {
      jfieldID field = JvGetFirstInstanceField(klass);
      int count = JvNumInstanceFields(klass);

      for (int i = 0; i < count; ++i)
        {
          if (field->isRef())
            {
              unsigned int off = field->getOffset();
              // If we run into a weird situation, we bail.
              if (off % sizeof (void *) != 0)
                return (void *) (GCJ_DEFAULT_DESCR);
              off /= sizeof (void *);
              // If we find a field outside the range of our bitmap,
              // fall back to procedure marker.  The bottom 2 bits are
              // reserved.
              if (off >= bits_per_word - 2)
                return (void *) (GCJ_DEFAULT_DESCR);
              desc |= 1ULL << (bits_per_word - off - 1);
            }

          field = field->getNextField();
        }
    }

  // For bitmap mark type, bottom bits are 01.
  desc |= GC_DS_BITMAP;

  // Bogus warning avoidance (on many platforms).
  return (void *) (unsigned long) desc;
}
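
// A worked example of the bitmap layout (an illustrative sketch under the
// assumption of a 64-bit target without hash synchronization; it is not used
// by libgcj): a class whose single reference field sits at byte offset 16
// (word offset 2) gets the descriptor built below.  The most significant bit
// describes the first word of the object.
#if 0
static jlong
example_bitmap_descr (void)
{
  const jint bits_per_word = 64;             // CHAR_BIT * sizeof (void *)
  jlong desc = 0;
  desc |= 1ULL << (bits_per_word - 1);       // word 0: the vtable pointer
  desc |= 1ULL << (bits_per_word - 2);       // word 1: the sync_info field
  desc |= 1ULL << (bits_per_word - 2 - 1);   // word 2: the reference field
  desc |= GC_DS_BITMAP;                      // low bits 01: bitmap descriptor
  return desc;                               // 0xE000000000000001
}
#endif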
// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}
// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

#ifdef LIBGCJ_GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}
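
// Rationale for the 16K cutoff (as the comment above suggests): a fully
// conservative scan could only misread the array's length word as a pointer
// if some heap object lived at that small an address, and the collected heap
// is assumed to start well above the first 16K of the address space.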
/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects. */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}
static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}
void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
                                  NULL, NULL);
}

void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}
void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}
void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}
// From boehm's misc.c
extern "C" void GC_enable();
extern "C" void GC_disable();

void
_Jv_DisableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_disable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

void
_Jv_EnableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_enable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}
static void * handle_out_of_memory(size_t)
{
  _Jv_ThrowNoMemory ();
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation and
  // marking.  FIXME: see above.
  array_free_list = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1)
                                                       * sizeof (ptr_t),
                                                       PTRFREE);
  memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));

  proc = GC_n_mark_procs++;
  GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;

  array_kind_x = GC_n_kinds++;
  GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
  GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
  GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
  GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
  GC_obj_kinds[array_kind_x].ok_init = TRUE;

  _Jv_MutexInit (&disable_gc_mutex);
}
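
// At this point the plumbing for object arrays is complete: an allocation
// through GC_generic_malloc (size, array_kind_x) draws from array_free_list,
// and the GC_MAKE_PROC (proc, 0) descriptor makes the collector mark such
// objects by calling _Jv_MarkArray rather than by scanning a bitmap.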
#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
    0,                          // class pointer
    (void *)(2 * sizeof(void *)),
                                // descriptor; scan 2 words incl. vtable ptr.
                                // Least significant bits must be zero to
                                // identify this as a length descriptor
    {0}                         // First method
};
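
// The (void *)(2 * sizeof(void *)) value above is a Boehm "length"
// descriptor (GC_DS_LENGTH, low bits 00): it simply asks the collector to
// scan the first 2 * sizeof(void *) bytes of the object conservatively,
// i.e. the fake vtable slot plus one traced field.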
void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}
// Ditto for two words: only the first two fields (beyond the fake
// vtable pointer) are traced.  Eventually this should probably be
// generalized.

static _Jv_VTable trace_two_vtable =
{
  0,                            // class pointer
  (void *)(3 * sizeof(void *)),
                                // descriptor; scan 3 words incl. vtable ptr.
  {0}                           // First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */
void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}