/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, August 9, 1995 6:09 pm PDT */
# include "private/gc_priv.h"

/*
 * We maintain several hash tables of hblks that have had false hits.
 * Each contains one bit per hash bucket; if any page in the bucket
 * has had a false hit, we assume that all of them have.
 * See the definition of page_hash_table in gc_priv.h.
 * False hits from the stack(s) are much more dangerous than false hits
 * from elsewhere, since the former can pin a large object that spans the
 * block, even though it does not start on the dangerous block.
 */

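/* Illustrative sketch (editor's addition, not part of the collector):   */
/* how the one-bit-per-bucket tables described above are queried and     */
/* updated.  A page address hashes to a bucket with PHT_HASH, and the    */
/* corresponding bit is read or set with the page_hash_table macros from */
/* gc_priv.h.  Guarded by the hypothetical BL_EXAMPLE macro so that it   */
/* is normally compiled out.                                              */
# ifdef BL_EXAMPLE
    static int GC_bl_example_page_is_listed(word *bl, struct hblk *h)
    {
        int index = PHT_HASH((word)h);      /* bucket for this page      */

        return get_pht_entry_from_index(bl, index) != 0;
    }

    static void GC_bl_example_list_page(word *bl, struct hblk *h)
    {
        int index = PHT_HASH((word)h);

        set_pht_entry_from_index(bl, index);
            /* Taints every page that hashes to this bucket, which is    */
            /* exactly the imprecision described in the comment above.   */
    }
# endif
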
/*
 * Externally callable routines are:
 *
 * GC_add_to_black_list_normal
 * GC_add_to_black_list_stack
 * GC_promote_black_lists
 * GC_is_black_listed
 *
 * All require that the allocator lock is held.
 */

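/* Rough life cycle of these routines (editor's summary; see the         */
/* definitions below for the authoritative details):                     */
/*   1. While marking, apparent pointers that fall in the heap but have  */
/*      no valid target are recorded on the *incomplete* lists via       */
/*      GC_add_to_black_list_normal / GC_add_to_black_list_stack.        */
/*   2. When a collection completes, GC_promote_black_lists turns the    */
/*      incomplete lists into the current ("old") lists and recycles     */
/*      the previous ones.                                                */
/*   3. When placing a large block, the allocator asks GC_is_black_listed */
/*      whether a candidate (block, length) pair overlaps a black listed  */
/*      page, and skips forward if it does.                               */
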
/* Pointers to individual tables.  We replace one table by another by  */
/* switching these pointers.                                            */
word * GC_old_normal_bl;
                /* Nonstack false references seen at last full          */
                /* collection.                                           */
word * GC_incomplete_normal_bl;
                /* Nonstack false references seen since last            */
                /* full collection.                                      */
word * GC_old_stack_bl;
word * GC_incomplete_stack_bl;

word GC_total_stack_black_listed;

word GC_black_list_spacing = MINHINCR*HBLKSIZE;  /* Initial rough guess */

void GC_clear_bl();

# if defined(__STDC__) || defined(__cplusplus)
    void GC_default_print_heap_obj_proc(ptr_t p)
# else
    void GC_default_print_heap_obj_proc(p)
    ptr_t p;
# endif
{
    ptr_t base = GC_base(p);

    GC_err_printf2("start: 0x%lx, appr. length: %ld", base, GC_size(base));
}

void (*GC_print_heap_obj) GC_PROTO((ptr_t p)) =
                GC_default_print_heap_obj_proc;

void GC_print_source_ptr(p)
ptr_t p;
{
    ptr_t base = GC_base(p);

    if (0 == base) {
        if (0 == p) {
            GC_err_printf0("in register");
        } else {
            GC_err_printf0("in root set");
        }
    } else {
        GC_err_printf0("in object at ");
        (*GC_print_heap_obj)(base);
    }
}

void GC_bl_init()
{
    if (!GC_all_interior_pointers) {
      GC_old_normal_bl = (word *)
                         GC_scratch_alloc((word)(sizeof (page_hash_table)));
      GC_incomplete_normal_bl = (word *)GC_scratch_alloc
                                        ((word)(sizeof(page_hash_table)));
      if (GC_old_normal_bl == 0 || GC_incomplete_normal_bl == 0) {
        GC_err_printf0("Insufficient memory for black list\n");
        EXIT();
      }
      GC_clear_bl(GC_old_normal_bl);
      GC_clear_bl(GC_incomplete_normal_bl);
    }
    GC_old_stack_bl = (word *)GC_scratch_alloc((word)(sizeof(page_hash_table)));
    GC_incomplete_stack_bl = (word *)GC_scratch_alloc
                                        ((word)(sizeof(page_hash_table)));
    if (GC_old_stack_bl == 0 || GC_incomplete_stack_bl == 0) {
        GC_err_printf0("Insufficient memory for black list\n");
        EXIT();
    }
    GC_clear_bl(GC_old_stack_bl);
    GC_clear_bl(GC_incomplete_stack_bl);
}

void GC_clear_bl(doomed)
word *doomed;
{
    BZERO(doomed, sizeof(page_hash_table));
}

void GC_copy_bl(old, new)
word *new, *old;
{
    BCOPY(old, new, sizeof(page_hash_table));
}

static word total_stack_black_listed();

/* Signal the completion of a collection.  Turn the incomplete black  */
/* lists into new black lists, etc.                                   */
void GC_promote_black_lists()
{
    word * very_old_normal_bl = GC_old_normal_bl;
    word * very_old_stack_bl = GC_old_stack_bl;

    GC_old_normal_bl = GC_incomplete_normal_bl;
    GC_old_stack_bl = GC_incomplete_stack_bl;
    if (!GC_all_interior_pointers) {
      GC_clear_bl(very_old_normal_bl);
    }
    GC_clear_bl(very_old_stack_bl);
    GC_incomplete_normal_bl = very_old_normal_bl;
    GC_incomplete_stack_bl = very_old_stack_bl;
    GC_total_stack_black_listed = total_stack_black_listed();
#   ifdef PRINTSTATS
        GC_printf1("%ld bytes in heap blacklisted for interior pointers\n",
                   (unsigned long)GC_total_stack_black_listed);
#   endif
    if (GC_total_stack_black_listed != 0) {
        GC_black_list_spacing =
                HBLKSIZE*(GC_heapsize/GC_total_stack_black_listed);
    }
    if (GC_black_list_spacing < 3 * HBLKSIZE) {
        GC_black_list_spacing = 3 * HBLKSIZE;
    }
    if (GC_black_list_spacing > MAXHINCR * HBLKSIZE) {
        GC_black_list_spacing = MAXHINCR * HBLKSIZE;
        /* Makes it easier to allocate really huge blocks, which otherwise */
        /* may have problems with nonuniform blacklist distributions.      */
        /* This way we should always succeed immediately after growing the */
        /* heap.                                                            */
    }
}

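/* Worked example for the spacing computation above (editor's addition,  */
/* assuming HBLKSIZE is 4K): with a 64 MB heap and 1 MB of stack-black-   */
/* listed blocks, GC_heapsize / GC_total_stack_black_listed is 64, so     */
/* GC_black_list_spacing becomes 64 * HBLKSIZE = 256 KB.  That is, on     */
/* average about 256 KB of heap separates consecutive black listed        */
/* blocks, which gives a rough estimate of how large a block can be       */
/* placed without hitting one.                                             */
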
void GC_unpromote_black_lists()
{
    if (!GC_all_interior_pointers) {
      GC_copy_bl(GC_old_normal_bl, GC_incomplete_normal_bl);
    }
    GC_copy_bl(GC_old_stack_bl, GC_incomplete_stack_bl);
}

/* P is not a valid pointer reference, but it falls inside      */
/* the plausible heap bounds.                                   */
/* Add it to the normal incomplete black list if appropriate.   */
#ifdef PRINT_BLACK_LIST
  void GC_add_to_black_list_normal(p, source)
  ptr_t source;
#else
  void GC_add_to_black_list_normal(p)
#endif
word p;
{
    if (!(GC_modws_valid_offsets[p & (sizeof(word)-1)])) return;
    {
        register int index = PHT_HASH(p);

        if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_normal_bl, index)) {
#           ifdef PRINT_BLACK_LIST
                if (!get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
                    GC_err_printf2(
                        "Black listing (normal) 0x%lx referenced from 0x%lx ",
                        (unsigned long) p, (unsigned long) source);
                    GC_print_source_ptr(source);
                    GC_err_puts("\n");
                }
#           endif
            set_pht_entry_from_index(GC_incomplete_normal_bl, index);
        } /* else this is probably just an interior pointer to an allocated */
          /* object, and isn't worth black listing.                         */
    }
}

/* And the same for false pointers from the stack. */
#ifdef PRINT_BLACK_LIST
  void GC_add_to_black_list_stack(p, source)
  ptr_t source;
#else
  void GC_add_to_black_list_stack(p)
#endif
word p;
{
    register int index = PHT_HASH(p);

    if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_stack_bl, index)) {
#       ifdef PRINT_BLACK_LIST
            if (!get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
                GC_err_printf2(
                    "Black listing (stack) 0x%lx referenced from 0x%lx ",
                    (unsigned long)p, (unsigned long)source);
                GC_print_source_ptr(source);
                GC_err_puts("\n");
            }
#       endif
        set_pht_entry_from_index(GC_incomplete_stack_bl, index);
    }
}

/*
 * Is the block starting at h of size len bytes black listed?  If so,
 * return the address of the next plausible r such that (r, len) might not
 * be black listed.  (R may not actually be in the heap.  We guarantee only
 * that every smaller value of r after h is also black listed.)
 * If (h,len) is not black listed, return 0.
 * Knows about the structure of the black list hash tables.
 */
struct hblk * GC_is_black_listed(h, len)
struct hblk * h;
word len;
{
    register int index = PHT_HASH((word)h);
    register word i;
    word nblocks = divHBLKSZ(len);

    if (!GC_all_interior_pointers) {
      if (get_pht_entry_from_index(GC_old_normal_bl, index)
          || get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
        return(h+1);
      }
    }

    for (i = 0; ; ) {
        if (GC_old_stack_bl[divWORDSZ(index)] == 0
            && GC_incomplete_stack_bl[divWORDSZ(index)] == 0) {
            /* An easy case */
            i += WORDSZ - modWORDSZ(index);
        } else {
            if (get_pht_entry_from_index(GC_old_stack_bl, index)
                || get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
                return(h+i+1);
            }
            i++;
        }
        if (i >= nblocks) break;
        index = PHT_HASH((word)(h+i));
    }
    return(0);
}

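/* Illustrative caller sketch (editor's addition): how the contract of   */
/* GC_is_black_listed is typically consumed when looking for a start     */
/* address for a block of nbytes bytes.  "candidate" and "nbytes" are    */
/* placeholder names; a real caller must additionally verify that the    */
/* advanced candidate still lies inside the free region it is examining, */
/* since the returned address need not be in the heap.  Guarded by the   */
/* hypothetical BL_EXAMPLE macro so it is normally compiled out.          */
# ifdef BL_EXAMPLE
    static struct hblk * GC_bl_example_skip(struct hblk *candidate, word nbytes)
    {
        struct hblk * next;

        while ((next = GC_is_black_listed(candidate, nbytes)) != 0) {
            candidate = next;  /* every smaller start is also black listed */
        }
        return candidate;
    }
# endif
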
/* Return the number of blacklisted blocks in a given range.   */
/* Used only for statistical purposes.                          */
/* Looks only at the GC_old_stack_bl.                           */
word GC_number_stack_black_listed(start, endp1)
struct hblk *start, *endp1;
{
    register struct hblk * h;
    word result = 0;

    for (h = start; h < endp1; h++) {
        register int index = PHT_HASH((word)h);

        if (get_pht_entry_from_index(GC_old_stack_bl, index)) result++;
    }
    return(result);
}


/* Return the total number of (stack) black-listed bytes. */
static word total_stack_black_listed()
{
    register unsigned i;
    word total = 0;

    for (i = 0; i < GC_n_heap_sects; i++) {
        struct hblk * start = (struct hblk *) GC_heap_sects[i].hs_start;
        word len = (word) GC_heap_sects[i].hs_bytes;
        struct hblk * endp1 = start + len/HBLKSIZE;

        total += GC_number_stack_black_listed(start, endp1);
    }
    return(total * HBLKSIZE);
}