// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation

   This file is part of libgcj.

   This software is copyrighted work licensed under the terms of the
   Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
   details.  */

#include <config.h>

#include <stdio.h>
#include <limits.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

// We include two autoconf headers.  Avoid multiple definition warnings.
#undef PACKAGE_NAME
#undef PACKAGE_STRING
#undef PACKAGE_TARNAME
#undef PACKAGE_VERSION

#ifdef HAVE_DLFCN_H
#undef _GNU_SOURCE
#define _GNU_SOURCE
#include <dlfcn.h>
#endif

extern "C"
{
#include <gc_config.h>

// Set GC_DEBUG before including gc.h!
#ifdef LIBGCJ_GC_DEBUG
# define GC_DEBUG
#endif

#include <gc_mark.h>
#include <gc_gcj.h>
#include <javaxfc.h> // GC_finalize_all declaration.

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

// From boehm's misc.c
void GC_enable();
void GC_disable();
};

#define MAYBE_MARK(Obj, Top, Limit, Source)  \
  Top=GC_MARK_AND_PUSH((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)
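
// GC_MARK_AND_PUSH pushes Obj onto the mark stack only when it points
// into the collected heap, and it returns the (possibly updated) mark
// stack pointer, which is why every use below reassigns Top.  See
// gc_mark.h for the precise contract.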

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static void **array_free_list;

static int _Jv_GC_has_static_roots (const char *filename, void *, size_t);

// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *)msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *)msl;

  if (env == (void *)1) /* Object allocated with debug allocator.  */
    addr = (GC_PTR)GC_USR_PTR_FROM_BASE(addr);
  jobject obj = (jobject) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might really
  // be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words, including
  // the header.  But this should remain true, since this should only
  // be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  GC_PTR p;

  p = (GC_PTR) dt;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
# endif

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class
      // objects as pointerfree memory, and then mark it more
      // intelligently here.  We ensure that the ClassClass mark
      // descriptor forces invocation of this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.
      // For the incremental collector, we need to make sure that the
      // class object is written whenever any of the subobjects are
      // altered and may need rescanning.  This may be tricky during
      // construction, and this may not be the right way to do this
      // with incremental collection.
      // If we overflow the mark stack, we will rescan the class
      // object, so we should be OK.  The same applies if we redo the
      // mark phase because win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (GC_PTR) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->constants.tags;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->constants.data;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (GC_PTR) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The vtable might be allocated even for compiled code.
      p = (GC_PTR) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The dispatch tables can be allocated at runtime.
      p = (GC_PTR) c->ancestors;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->idt;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->hack_signers;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->aux_info;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      p = (GC_PTR) c->reflection_data;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The class chain must be marked for runtime-allocated Classes
      // loaded by the bootstrap ClassLoader.
      p = (GC_PTR) c->next_or_version;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (GC_PTR) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass();
        }
    }

  return mark_stack_ptr;
}
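
// How this mark procedure gets invoked: _Jv_InitGC below hands
// _Jv_MarkObj to GC_init_gcj_malloc, so the collector calls it for
// objects allocated with the gcj kinds.  For the debug kind the
// collector passes env == 1, and addr then refers to the debug base
// pointer, which is why the GC_USR_PTR_FROM_BASE adjustment above is
// needed.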

// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *)msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *)msl;

  if (env == (void *)1) /* Object allocated with debug allocator.  */
    addr = (void *)GC_USR_PTR_FROM_BASE(addr);
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  GC_PTR p;

  p = (GC_PTR) dt;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
# endif

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (GC_PTR) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
    }

  return mark_stack_ptr;
}

// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious
// assumption, since another one could be registered first.  But the
// compiler also knows this, so in that case everything else will
// break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)

void *
_Jv_BuildGCDescr(jclass self)
{
  jlong desc = 0;
  jint bits_per_word = CHAR_BIT * sizeof (void *);

  // Note: for now we only consider a bitmap mark descriptor.  We
  // could also handle the case where the first N fields of a type are
  // references.  However, this is not very likely to be used by many
  // classes, and it is easier to compute things this way.

  // The vtable pointer.
  desc |= 1ULL << (bits_per_word - 1);
#ifndef JV_HASH_SYNCHRONIZATION
  // The sync_info field.
  desc |= 1ULL << (bits_per_word - 2);
#endif

  for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
    {
      jfieldID field = JvGetFirstInstanceField(klass);
      int count = JvNumInstanceFields(klass);

      for (int i = 0; i < count; ++i)
        {
          if (field->isRef())
            {
              unsigned int off = field->getOffset();
              // If we run into a weird situation, we bail.
              if (off % sizeof (void *) != 0)
                return (void *) (GCJ_DEFAULT_DESCR);
              off /= sizeof (void *);
              // If we find a field outside the range of our bitmap,
              // fall back to procedure marker.  The bottom 2 bits are
              // reserved.
              if (off >= (unsigned) bits_per_word - 2)
                return (void *) (GCJ_DEFAULT_DESCR);
              desc |= 1ULL << (bits_per_word - off - 1);
            }

          field = field->getNextField();
        }
    }

  // For bitmap mark type, bottom bits are 01.
  desc |= 1;
  // Bogus warning avoidance (on many platforms).
  return (void *) (unsigned long) desc;
}
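
// Worked example of the bitmap layout (a sketch, assuming a 64-bit
// target, so bits_per_word == 64, and JV_HASH_SYNCHRONIZATION, so
// there is no sync_info bit): for a class whose only reference field
// sits at byte offset 16, off becomes 16 / 8 == 2, and the loop sets
// bit 64 - 2 - 1 == 61.  Together with bit 63 for the vtable pointer
// and the low `01' tag, the descriptor is
//   (1ULL << 63) | (1ULL << 61) | 1,
// i.e. word 0 (the vtable) and word 2 (the field) are scanned and
// everything else in the object is skipped.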

// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}
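
// Note that because this memory is pointer-free ("atomic"), the
// collector never scans it: a reference stored in a block returned by
// _Jv_AllocBytes does not by itself keep the referenced object alive.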

#ifdef LIBGCJ_GC_DEBUG

void *
_Jv_AllocObj (jsize size, jclass klass)
{
  return GC_GCJ_MALLOC (size, klass->vtable);
}

void *
_Jv_AllocPtrFreeObj (jsize size, jclass klass)
{
#ifdef JV_HASH_SYNCHRONIZATION
  void * obj = GC_MALLOC_ATOMIC(size);
  *((_Jv_VTable **) obj) = klass->vtable;
#else
  void * obj = GC_GCJ_MALLOC(size, klass->vtable);
#endif
  return obj;
}

#endif /* LIBGCJ_GC_DEBUG */

// In the non-debug case, the above two functions are defined
// as inline functions in boehm-gc.h.  In the debug case we
// really want to take advantage of the definitions in gc_gcj.h.

// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;

#ifdef LIBGCJ_GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}
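
// To spell the heuristic out: if the allocation is smaller than
// min_heap_addr bytes, the length stored in the array is necessarily
// smaller than 16*1024 as well and, assuming no collected heap lives
// at such low addresses, it cannot be mistaken for a heap pointer, so
// the fully conservative GC_MALLOC kind is safe.  Larger arrays go
// through array_kind_x so that _Jv_MarkArray, rather than the
// conservative scanner, decides what to trace.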

/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects.  */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size ? size : 1);
}

#ifdef INTERPRETER
typedef _Jv_ClosureList *closure_list_pointer;

/* Release closures in a _Jv_ClosureList.  */
static void
finalize_closure_list (GC_PTR obj, GC_PTR)
{
  _Jv_ClosureList **clpp = (_Jv_ClosureList **)obj;
  _Jv_ClosureList::releaseClosures (clpp);
}

/* Allocate a double-indirect pointer to a _Jv_ClosureList that will
   get garbage-collected after this double-indirect pointer becomes
   unreachable by any other objects, including finalizable ones.  */
_Jv_ClosureList **
_Jv_ClosureListFinalizer ()
{
  _Jv_ClosureList **clpp;
  clpp = (_Jv_ClosureList **)_Jv_AllocBytes (sizeof (*clpp));
  GC_REGISTER_FINALIZER_UNREACHABLE (clpp, finalize_closure_list,
                                     NULL, NULL, NULL);
  return clpp;
}
#endif // INTERPRETER

static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
                                  NULL, NULL);
}
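
// GC_REGISTER_FINALIZER_NO_ORDER imposes no ordering between
// finalizable objects, so cycles of finalizable objects still get
// finalized; this matches Java's lack of finalization-order guarantees
// and complements the GC_java_finalization flag set in _Jv_InitGC.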

void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

void
_Jv_RunGC (void)
{
  GC_gcollect ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

int
_Jv_SetGCFreeSpaceDivisor (int div)
{
  return (int)GC_set_free_space_divisor ((GC_word)div);
}

void
_Jv_DisableGC (void)
{
  GC_disable();
}

void
_Jv_EnableGC (void)
{
  GC_enable();
}

static void * handle_out_of_memory(size_t)
{
  _Jv_ThrowNoMemory();
}

static void
gcj_describe_type_fn(void *obj, char *out_buf)
{
  _Jv_VTable *dt = *(_Jv_VTable **) obj;

  if (! dt /* Shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jclass klass = dt->clas;
  if (!klass /* shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jstring name = klass -> getName();
  size_t len = name -> length();
  if (len >= GC_TYPE_DESCR_LEN) len = GC_TYPE_DESCR_LEN - 1;
  JvGetStringUTFRegion (name, 0, len, out_buf);
  out_buf[len] = '\0';
}

void
_Jv_InitGC (void)
{
  int proc;
  static bool gc_initialized;

  if (gc_initialized)
    return;

  gc_initialized = 1;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

#if defined (HAVE_DLFCN_H) && defined (HAVE_DLADDR)
  // Tell the collector to ask us before scanning DSOs.
  GC_register_has_static_roots_callback (_Jv_GC_has_static_roots);
#endif

  // Configure the collector to use the bitmap marking descriptors
  // that we stash in the class vtable.
  // We always use mark proc descriptor 0, since the compiler knows
  // about it.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation
  // and marking.
  array_free_list = GC_new_free_list();
  proc = GC_new_proc((GC_mark_proc)_Jv_MarkArray);
  array_kind_x = GC_new_kind(array_free_list, GC_MAKE_PROC (proc, 0), 0, 1);

  // Arrange to have the GC print Java class names in backtraces, etc.
  GC_register_describe_type_fn(GC_gcj_kind, gcj_describe_type_fn);
  GC_register_describe_type_fn(GC_gcj_debug_kind, gcj_describe_type_fn);
}
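
// The array `kind' created above is what ties the pieces together:
// arrays allocated via GC_generic_malloc (size, array_kind_x) in
// _Jv_AllocArray are traced by _Jv_MarkArray, while ordinary objects
// use the per-class descriptors produced by _Jv_BuildGCDescr, falling
// back to the _Jv_MarkObj procedure when the descriptor says so.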

#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
  0,                            // class pointer
  (void *)(2 * sizeof(void *)),
                                // descriptor; scan 2 words incl. vtable ptr.
                                // Least significant bits must be zero to
                                // identify this as a length descriptor.
  {0}                           // First method
};

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}
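
// About the descriptor value: because the low tag bits are zero, the
// collector treats it as a plain length descriptor, meaning
// "conservatively scan the first 2 * sizeof(void *) bytes of the
// object", i.e. the vtable slot plus one user word.  trace_two_vtable
// below extends the same trick to two user words.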

// Ditto for two words: a fake vtable whose descriptor causes only the
// first two fields (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0,                            // class pointer
  (void *)(3 * sizeof(void *)),
                                // descriptor; scan 3 words incl. vtable ptr.
  {0}                           // First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */

void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  // This test helps to ensure that we meet a precondition of
  // GC_general_register_disappearing_link, viz. "Obj must be a
  // pointer to the first word of an object we allocated."
  if (GC_base(*objp))
    GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}
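
// Once registered, *objp behaves like a weak pointer: the collector
// clears it when the object it points to becomes otherwise
// unreachable.  This is the usual building block for the java.lang.ref
// reference types.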

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}



#if defined (HAVE_DLFCN_H) && defined (HAVE_DLADDR)

// We keep a store of the filenames of DSOs that need to be
// conservatively scanned by the garbage collector.  During collection
// the gc calls _Jv_GC_has_static_roots() to see if the data segment
// of a DSO should be scanned.
typedef struct filename_node
{
  char *name;
  struct filename_node *link;
} filename_node;

#define FILENAME_STORE_SIZE 17
static filename_node *filename_store[FILENAME_STORE_SIZE];

// Find a filename in filename_store.
static filename_node **
find_file (const char *filename)
{
  int index = strlen (filename) % FILENAME_STORE_SIZE;
  filename_node **node = &filename_store[index];

  while (*node)
    {
      if (strcmp ((*node)->name, filename) == 0)
        return node;
      node = &(*node)->link;
    }

  return node;
}
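
// filename_store is a small fixed-size hash table keyed on the length
// of the filename, with chaining through `link'.  find_file returns
// the address of the matching slot, or of the terminating null pointer
// when the name is absent, so a caller can insert by assigning to
// *node, as _Jv_RegisterLibForGc does below.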

// Print the store of filenames of DSOs that need collection.
void
_Jv_print_gc_store (void)
{
  for (int i = 0; i < FILENAME_STORE_SIZE; i++)
    {
      filename_node *node = filename_store[i];
      while (node)
        {
          fprintf (stderr, "%s\n", node->name);
          node = node->link;
        }
    }
}

// Create a new node in the store of libraries to collect.
static filename_node *
new_node (const char *filename)
{
  filename_node *node = (filename_node *) _Jv_Malloc (sizeof (filename_node));
  node->name = (char *) _Jv_Malloc (strlen (filename) + 1);
  node->link = NULL;
  strcpy (node->name, filename);

  return node;
}

// Nonzero if the gc should scan this lib.
static int
_Jv_GC_has_static_roots (const char *filename, void *, size_t)
{
  if (filename == NULL || strlen (filename) == 0)
    // No filename; better safe than sorry.
    return 1;

  filename_node **node = find_file (filename);
  if (*node)
    return 1;

  return 0;
}

#endif

// Register the DSO that contains p for collection.
void
_Jv_RegisterLibForGc (const void *p __attribute__ ((__unused__)))
{
#if defined (HAVE_DLFCN_H) && defined (HAVE_DLADDR)
  Dl_info info;

  if (dladdr (const_cast<void *>(p), &info) != 0)
    {
      filename_node **node = find_file (info.dli_fname);
      if (! *node)
        *node = new_node (info.dli_fname);
    }
#endif
}

void
_Jv_SuspendThread (_Jv_Thread_t *thread)
{
#if defined(GC_PTHREADS) && !defined(GC_SOLARIS_THREADS) \
    && !defined(GC_WIN32_THREADS) && !defined(GC_DARWIN_THREADS)
  GC_suspend_thread (_Jv_GetPlatformThreadID (thread));
#endif
}

void
_Jv_ResumeThread (_Jv_Thread_t *thread)
{
#if defined(GC_PTHREADS) && !defined(GC_SOLARIS_THREADS) \
    && !defined(GC_WIN32_THREADS) && !defined(GC_DARWIN_THREADS)
  GC_resume_thread (_Jv_GetPlatformThreadID (thread));
#endif
}

int
_Jv_IsThreadSuspended (_Jv_Thread_t *thread)
{
#if defined(GC_PTHREADS) && !defined(GC_SOLARIS_THREADS) \
    && !defined(GC_WIN32_THREADS) && !defined(GC_DARWIN_THREADS)
  return GC_is_thread_suspended (_Jv_GetPlatformThreadID (thread));
#else
  return 0;
#endif
}

void
_Jv_GCAttachThread ()
{
  // The registration interface is only defined on posixy systems and
  // only actually works if pthread_getattr_np is defined.
  // FIXME: until gc7 it is simpler to disable this on solaris.
#if defined(HAVE_PTHREAD_GETATTR_NP) && !defined(GC_SOLARIS_THREADS)
  GC_register_my_thread ();
#endif
}

void
_Jv_GCDetachThread ()
{
#if defined(HAVE_PTHREAD_GETATTR_NP) && !defined(GC_SOLARIS_THREADS)
  GC_unregister_my_thread ();
#endif
}